
refactor: remove Meilisearch, Sonic, and Elasticsearch completely

This commit is contained in:
naskya 2023-12-29 06:23:17 +09:00
parent 209fee7a6d
commit bf045daaed
Signed by: naskya
GPG key ID: 712D413B3A9FED5C
21 changed files with 118 additions and 966 deletions

View file

@@ -102,9 +102,6 @@ importers:
'@discordapp/twemoji':
specifier: ^15.0.2
version: 15.0.2
'@elastic/elasticsearch':
specifier: 8.11.0
version: 8.11.0
'@koa/cors':
specifier: 5.0.0
version: 5.0.0
@@ -273,9 +270,6 @@ importers:
megalodon:
specifier: workspace:*
version: link:../megalodon
meilisearch:
specifier: 0.36.0
version: 0.36.0
mfm-js:
specifier: 0.24.0
version: 0.24.0
@@ -372,9 +366,6 @@ importers:
sharp:
specifier: 0.33.1
version: 0.33.1
sonic-channel:
specifier: 1.3.1
version: 1.3.1
stringz:
specifier: 2.1.0
version: 2.1.0
@@ -1714,30 +1705,6 @@ packages:
universalify: 0.1.2
dev: false
/@elastic/elasticsearch@8.11.0:
resolution: {integrity: sha512-1UEQFdGLuKdROLJnMTjegasRM3X9INm/PVADoIVgdTfuv6DeJ17UMuNwYSkCrLrC0trLjjGV4YganpbJJX/VLg==}
engines: {node: '>=18'}
dependencies:
'@elastic/transport': 8.4.0
tslib: 2.6.2
transitivePeerDependencies:
- supports-color
dev: false
/@elastic/transport@8.4.0:
resolution: {integrity: sha512-Yb3fDa7yGD0ca3uMbL64M3vM1cE5h5uHmBcTjkdB4VpCasRNKSd09iDpwqX8zX1tbBtxcaKYLceKthWvPeIxTw==}
engines: {node: '>=16'}
dependencies:
debug: 4.3.4
hpagent: 1.2.0
ms: 2.1.3
secure-json-parse: 2.7.0
tslib: 2.6.2
undici: 5.27.2
transitivePeerDependencies:
- supports-color
dev: false
/@emnapi/runtime@0.44.0:
resolution: {integrity: sha512-ZX/etZEZw8DR7zAB1eVQT40lNo0jeqpb6dCgOvctB6FIQ5PoXfMuNY8+ayQfu8tNQbAB8gQWSSJupR8NxeiZXw==}
requiresBuild: true
@@ -7004,14 +6971,6 @@ packages:
cross-spawn: 7.0.3
dev: true
/cross-fetch@3.1.8:
resolution: {integrity: sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==}
dependencies:
node-fetch: 2.7.0
transitivePeerDependencies:
- encoding
dev: false
/cross-spawn@5.1.0:
resolution: {integrity: sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==}
dependencies:
@@ -12338,14 +12297,6 @@ packages:
engines: {node: '>= 0.6'}
dev: false
/meilisearch@0.36.0:
resolution: {integrity: sha512-swcvEYrct0/zsGj3jlbPm1OYxbH14IURnlysKlXywNicIQ5EMkSYLYCLCwOuBKAaGcdISWdgdylH9TXVLegmOQ==}
dependencies:
cross-fetch: 3.1.8
transitivePeerDependencies:
- encoding
dev: false
/mem@9.0.2:
resolution: {integrity: sha512-F2t4YIv9XQUBHt6AOJ0y7lSmP1+cY7Fm1DRh9GClTGzKST7UWLMx6ly9WZdLH/G/ppM5RL4MlQfRT71ri9t19A==}
engines: {node: '>=12.20'}
@@ -15065,10 +15016,6 @@ packages:
ajv-keywords: 3.5.2(ajv@6.12.6)
dev: true
/secure-json-parse@2.7.0:
resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==}
dev: false
/seedrandom@2.4.2:
resolution: {integrity: sha512-uQ72txMoObtuJooiBLSVs5Yu2e9d/lHQz0boaqHjW8runXB9vR8nFtaZV54wYii613N0C8ZqTBLsfwDhAdpvqQ==}
dev: false
@@ -15352,11 +15299,6 @@ packages:
smart-buffer: 4.2.0
dev: false
/sonic-channel@1.3.1:
resolution: {integrity: sha512-+K4IZVFE7Tf2DB4EFZ23xo7a/+gJaiOHhFzXVZpzkX6Rs/rvf4YbSxnEGdYw8mrTcjtpG+jLVQEhP8sNTtN5VA==}
engines: {node: '>= 6.0.0'}
dev: false
/sort-keys-length@1.0.1:
resolution: {integrity: sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==}
engines: {node: '>=0.10.0'}

View file

@@ -28,7 +28,6 @@
"@bull-board/koa": "5.10.2",
"@bull-board/ui": "5.10.2",
"@discordapp/twemoji": "^15.0.2",
"@elastic/elasticsearch": "8.11.0",
"@koa/cors": "5.0.0",
"@koa/multer": "3.0.2",
"@koa/router": "12.0.1",
@@ -85,7 +84,6 @@
"koa-send": "5.0.1",
"koa-slow": "2.1.0",
"megalodon": "workspace:*",
"meilisearch": "0.36.0",
"mfm-js": "0.24.0",
"mime-types": "2.1.35",
"msgpackr": "1.10.1",
@@ -118,7 +116,6 @@
"sanitize-html": "2.11.0",
"semver": "7.5.4",
"sharp": "0.33.1",
"sonic-channel": "1.3.1",
"stringz": "2.1.0",
"summaly": "2.7.0",
"syslog-pro": "1.0.0",

View file

@@ -37,27 +37,27 @@ export type Source = {
user?: string;
tls?: { [z: string]: string };
};
elasticsearch: {
host: string;
port: number;
ssl?: boolean;
user?: string;
pass?: string;
index?: string;
};
sonic: {
host: string;
port: number;
auth?: string;
collection?: string;
bucket?: string;
};
meilisearch: {
host: string;
port: number;
apiKey?: string;
ssl: boolean;
};
// elasticsearch: {
// host: string;
// port: number;
// ssl?: boolean;
// user?: string;
// pass?: string;
// index?: string;
// };
// sonic: {
// host: string;
// port: number;
// auth?: string;
// collection?: string;
// bucket?: string;
// };
// meilisearch: {
// host: string;
// port: number;
// apiKey?: string;
// ssl: boolean;
// };
proxy?: string;
proxySmtp?: string;

View file

@@ -2,7 +2,7 @@ import si from "systeminformation";
import Xev from "xev";
import * as osUtils from "os-utils";
import { fetchMeta } from "@/misc/fetch-meta.js";
import meilisearch from "@/db/meilisearch.js";
// import meilisearch from "@/db/meilisearch.js";
const ev = new Xev();
@@ -30,7 +30,7 @@ export default function () {
const memStats = await mem();
const netStats = await net();
const fsStats = await fs();
const meilisearchStats = await meilisearchStatus();
// const meilisearchStats = await meilisearchStatus();
const stats = {
cpu: roundCpu(cpu),
@@ -47,7 +47,7 @@ export default function () {
r: round(Math.max(0, fsStats.rIO_sec ?? 0)),
w: round(Math.max(0, fsStats.wIO_sec ?? 0)),
},
meilisearch: meilisearchStats,
// meilisearch: meilisearchStats,
};
ev.emit("serverStats", stats);
log.unshift(stats);
@@ -88,14 +88,14 @@ async function fs() {
}
// MEILI STAT
async function meilisearchStatus() {
if (meilisearch) {
return meilisearch.serverStats();
} else {
return {
health: "unconfigured",
size: 0,
indexed_count: 0,
};
}
}
// async function meilisearchStatus() {
// if (meilisearch) {
// return meilisearch.serverStats();
// } else {
// return {
// health: "unconfigured",
// size: 0,
// indexed_count: 0,
// };
// }
// }

View file

@@ -1,65 +0,0 @@
import * as elasticsearch from "@elastic/elasticsearch";
import config from "@/config/index.js";
const index = {
settings: {
analysis: {
analyzer: {
ngram: {
tokenizer: "ngram",
},
},
},
},
mappings: {
properties: {
text: {
type: "text",
index: true,
analyzer: "ngram",
},
userId: {
type: "keyword",
index: true,
},
userHost: {
type: "keyword",
index: true,
},
},
},
};
// Init ElasticSearch connection
const client = config.elasticsearch
? new elasticsearch.Client({
node: `${config.elasticsearch.ssl ? "https://" : "http://"}${
config.elasticsearch.host
}:${config.elasticsearch.port}`,
auth:
config.elasticsearch.user && config.elasticsearch.pass
? {
username: config.elasticsearch.user,
password: config.elasticsearch.pass,
}
: undefined,
pingTimeout: 30000,
})
: null;
if (client) {
client.indices
.exists({
index: config.elasticsearch.index || "misskey_note",
})
.then((exist) => {
if (!exist.body) {
client.indices.create({
index: config.elasticsearch.index || "misskey_note",
body: index,
});
}
});
}
export default client;

View file

@@ -1,451 +0,0 @@
import { Health, Index, MeiliSearch, Stats } from "meilisearch";
import { dbLogger } from "./logger.js";
import config from "@/config/index.js";
import { Note } from "@/models/entities/note.js";
import * as url from "url";
import { ILocalUser } from "@/models/entities/user.js";
import { Followings, Users } from "@/models/index.js";
const logger = dbLogger.createSubLogger("meilisearch", "gray", false);
let posts: Index;
let client: MeiliSearch;
const hasConfig =
config.meilisearch &&
(config.meilisearch.host ||
config.meilisearch.port ||
config.meilisearch.apiKey);
if (hasConfig) {
const host = hasConfig ? config.meilisearch.host ?? "localhost" : "";
const port = hasConfig ? config.meilisearch.port ?? 7700 : 0;
const auth = hasConfig ? config.meilisearch.apiKey ?? "" : "";
const ssl = hasConfig ? config.meilisearch.ssl ?? false : false;
logger.info("Connecting to MeiliSearch");
client = new MeiliSearch({
host: `${ssl ? "https" : "http"}://${host}:${port}`,
apiKey: auth,
});
posts = client.index("posts");
posts
.updateSearchableAttributes(["text"])
.catch((e) =>
logger.error(`Setting searchable attr failed, searches won't work: ${e}`),
);
posts
.updateFilterableAttributes([
"userName",
"userHost",
"mediaAttachment",
"createdAt",
"userId",
])
.catch((e) =>
logger.error(
`Setting filterable attr failed, advanced searches won't work: ${e}`,
),
);
posts
.updateSortableAttributes(["createdAt"])
.catch((e) =>
logger.error(
`Setting sortable attr failed, placeholder searches won't sort properly: ${e}`,
),
);
posts
.updateStopWords([
"the",
"a",
"as",
"be",
"of",
"they",
"these",
"is",
"are",
"これ",
"それ",
"あれ",
"この",
"その",
"あの",
"ここ",
"そこ",
"あそこ",
"こちら",
"どこ",
"私",
"僕",
"俺",
"君",
"あなた",
"我々",
"私達",
"彼女",
"彼",
"です",
"ます",
"は",
"が",
"の",
"に",
"を",
"で",
"へ",
"から",
"まで",
"より",
"も",
"どの",
"と",
"それで",
"しかし",
])
.catch((e) =>
logger.error(
`Failed to set Meilisearch stop words, database size will be larger: ${e}`,
),
);
posts
.updateRankingRules([
"sort",
"words",
"typo",
"proximity",
"attribute",
"exactness",
])
.catch((e) => {
logger.error("Failed to set ranking rules, sorting won't work properly.");
});
logger.info("Connected to MeiliSearch");
}
export type MeilisearchNote = {
id: string;
text: string;
userId: string;
userHost: string;
userName: string;
channelId: string;
mediaAttachment: string;
createdAt: number;
};
function timestampToUnix(timestamp: string) {
let unix = 0;
// Only contains numbers => UNIX timestamp
if (/^\d+$/.test(timestamp)) {
unix = Number.parseInt(timestamp);
}
if (unix === 0) {
// Try to parse the timestamp as JavaScript Date
const date = Date.parse(timestamp);
if (Number.isNaN(date)) return 0;
unix = date / 1000;
}
return unix;
}
export default hasConfig
? {
search: async (
query: string,
limit: number,
offset: number,
userCtx: ILocalUser | null,
overrideSort: string | null,
) => {
/// Advanced search syntax
/// from:user => filter by user + optional domain
/// has:image/video/audio/text/file => filter by attachment types
/// domain:domain.com => filter by domain
/// before:Date => show posts made before Date
/// after: Date => show posts made after Date
/// "text" => get posts with exact text between quotes
/// filter:following => show results only from users you follow
/// filter:followers => show results only from followers
/// order:desc/asc => order results ascending or descending
const constructedFilters: string[] = [];
let sortRules: string[] = [];
const splitSearch = query.split(" ");
// Detect search operators and remove them from the actual query
const filteredSearchTerms = (
await Promise.all(
splitSearch.map(async (term) => {
if (term.startsWith("has:")) {
const fileType = term.slice(4);
constructedFilters.push(`mediaAttachment = "${fileType}"`);
return null;
} else if (term.startsWith("from:")) {
let user = term.slice(5);
if (user.length === 0) return null;
// Cut off leading @, those aren't saved in the DB
if (user.charAt(0) === "@") {
user = user.slice(1);
}
// Determine if we got a webfinger address or a single username
if (user.split("@").length > 1) {
const splitUser = user.split("@");
const domain = splitUser.pop();
user = splitUser.join("@");
constructedFilters.push(
`userName = ${user} AND userHost = ${domain}`,
);
} else {
constructedFilters.push(`userName = ${user}`);
}
return null;
} else if (term.startsWith("domain:")) {
const domain = term.slice(7);
if (
domain.length === 0 ||
domain === "local" ||
domain === config.hostname
) {
constructedFilters.push("userHost NOT EXISTS");
return null;
}
constructedFilters.push(`userHost = ${domain}`);
return null;
} else if (term.startsWith("after:")) {
const timestamp = term.slice(6);
const unix = timestampToUnix(timestamp);
if (unix !== 0) constructedFilters.push(`createdAt > ${unix}`);
return null;
} else if (term.startsWith("before:")) {
const timestamp = term.slice(7);
const unix = timestampToUnix(timestamp);
if (unix !== 0) constructedFilters.push(`createdAt < ${unix}`);
return null;
} else if (term.startsWith("filter:following")) {
// Check if we got a context user
if (userCtx) {
// Fetch user follows from DB
const followedUsers = await Followings.find({
where: {
followerId: userCtx.id,
},
select: {
followeeId: true,
},
});
const followIDs = followedUsers.map(
(user) => user.followeeId,
);
if (followIDs.length === 0) return null;
constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
} else {
logger.warn(
"search filtered to follows called without user context",
);
}
return null;
} else if (term.startsWith("filter:followers")) {
// Check if we got a context user
if (userCtx) {
// Fetch users follows from DB
const followedUsers = await Followings.find({
where: {
followeeId: userCtx.id,
},
select: {
followerId: true,
},
});
const followIDs = followedUsers.map(
(user) => user.followerId,
);
if (followIDs.length === 0) return null;
constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
} else {
logger.warn(
"search filtered to followers called without user context",
);
}
return null;
} else if (term.startsWith("order:desc")) {
sortRules.push("createdAt:desc");
return null;
} else if (term.startsWith("order:asc")) {
sortRules.push("createdAt:asc");
return null;
}
return term;
}),
)
).filter((term) => term !== null);
// An empty search term with defined filters means we have a placeholder search => https://www.meilisearch.com/docs/reference/api/search#placeholder-search
// These have to be ordered manually, otherwise the *oldest* posts are returned first, which we don't want
// If the user has defined a sort rule, don't mess with it
if (
filteredSearchTerms.length === 0 &&
constructedFilters.length > 0 &&
sortRules.length === 0
) {
sortRules.push("createdAt:desc");
}
// More than one sorting rule doesn't make sense. We only keep the first one, otherwise weird stuff may happen.
if (sortRules.length > 1) {
sortRules = [sortRules[0]];
}
// An override sort takes precedence, user sorting is ignored here
if (overrideSort) {
sortRules = [overrideSort];
}
logger.info(`Searching for ${filteredSearchTerms.join(" ")}`);
logger.info(`Limit: ${limit}`);
logger.info(`Offset: ${offset}`);
logger.info(`Filters: ${constructedFilters}`);
logger.info(`Ordering: ${sortRules}`);
return posts.search(filteredSearchTerms.join(" "), {
limit: limit,
offset: offset,
filter: constructedFilters,
sort: sortRules,
});
},
ingestNote: async (ingestNotes: Note | Note[]) => {
if (ingestNotes instanceof Note) {
ingestNotes = [ingestNotes];
}
const indexingBatch: MeilisearchNote[] = [];
for (const note of ingestNotes) {
if (note.user === undefined) {
note.user = await Users.findOne({
where: {
id: note.userId,
},
});
}
let attachmentType = "";
if (note.attachedFileTypes.length > 0) {
attachmentType = note.attachedFileTypes[0].split("/")[0];
switch (attachmentType) {
case "image":
case "video":
case "audio":
case "text":
break;
default:
attachmentType = "file";
break;
}
}
indexingBatch.push(<MeilisearchNote>{
id: note.id.toString(),
text: note.text ? note.text : "",
userId: note.userId,
userHost:
note.userHost !== ""
? note.userHost
: url.parse(config.host).host,
channelId: note.channelId ? note.channelId : "",
mediaAttachment: attachmentType,
userName: note.user?.username ?? "UNKNOWN",
createdAt: note.createdAt.getTime() / 1000, // division by 1000 is necessary because Node returns in ms-accuracy
});
}
return posts
.addDocuments(indexingBatch, {
primaryKey: "id",
})
.then(() =>
logger.info(`sent ${indexingBatch.length} posts for indexing`),
);
},
serverStats: async () => {
const health: Health = await client.health();
const stats: Stats = await client.getStats();
return {
health: health.status,
size: stats.databaseSize,
indexed_count: stats.indexes.posts.numberOfDocuments,
};
},
deleteNotes: async (note: Note | Note[] | string | string[]) => {
if (note instanceof Note) {
note = [note];
}
if (typeof note === "string") {
note = [note];
}
const deletionBatch = note
.map((n) => {
if (n instanceof Note) {
return n.id;
}
if (n.length > 0) return n;
logger.error(
`Failed to delete note from Meilisearch, invalid post ID: ${JSON.stringify(
n,
)}`,
);
throw new Error(
`Invalid note ID passed to meilisearch deleteNote: ${JSON.stringify(
n,
)}`,
);
})
.filter((el) => el !== null);
await posts.deleteDocuments(deletionBatch as string[]).then(() => {
logger.info(
`submitted ${deletionBatch.length} large batch for deletion`,
);
});
},
}
: null;
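
The wrapper deleted above documented an operator-style query grammar (from:, has:, domain:, before:, after:, filter:, order:) and translated those terms into Meilisearch filter and sort expressions. As a rough, self-contained TypeScript sketch of that parsing idea — not the removed implementation, with purely illustrative names and example values — the operator handling reduces to something like this:

// Simplified sketch of the operator parsing described above.
// Covers only has:, from:, domain:, before:, after: and order:;
// the removed code additionally resolved filter:following/followers against the database.
type ParsedQuery = { text: string; filters: string[]; sort: string[] };

function parseSearchQuery(query: string): ParsedQuery {
	const filters: string[] = [];
	const sort: string[] = [];
	const plain: string[] = [];

	for (const term of query.split(" ")) {
		if (term.startsWith("has:")) {
			// Attachment type filter, e.g. has:image
			filters.push(`mediaAttachment = "${term.slice(4)}"`);
		} else if (term.startsWith("from:")) {
			// Author filter; strip a leading @ and split an optional host part
			const user = term.slice(5).replace(/^@/, "");
			if (user.length === 0) continue;
			const [name, host] = user.split("@");
			filters.push(
				host ? `userName = ${name} AND userHost = ${host}` : `userName = ${name}`,
			);
		} else if (term.startsWith("domain:")) {
			filters.push(`userHost = ${term.slice(7)}`);
		} else if (term.startsWith("before:") || term.startsWith("after:")) {
			// Date filters are compared as UNIX timestamps (seconds)
			const before = term.startsWith("before:");
			const unix = Math.floor(Date.parse(term.slice(before ? 7 : 6)) / 1000);
			if (!Number.isNaN(unix))
				filters.push(`createdAt ${before ? "<" : ">"} ${unix}`);
		} else if (term === "order:asc" || term === "order:desc") {
			sort.push(`createdAt:${term.slice(6)}`);
		} else if (term.length > 0) {
			plain.push(term);
		}
	}
	return { text: plain.join(" "), filters, sort };
}

// Example (illustrative user/domain):
// parseSearchQuery("cat pictures has:image from:@alice@example.com after:2023-01-01")
// => text: "cat pictures"
//    filters: ['mediaAttachment = "image"', "userName = alice AND userHost = example.com", "createdAt > 1672531200"]
//    sort: []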

View file

@@ -1,51 +0,0 @@
import * as SonicChannel from "sonic-channel";
import { dbLogger } from "./logger.js";
import config from "@/config/index.js";
const logger = dbLogger.createSubLogger("sonic", "gray", false);
const handlers = (type: string): SonicChannel.Handlers => ({
connected: () => {
logger.succ(`Connected to Sonic ${type}`);
},
disconnected: (error) => {
logger.warn(`Disconnected from Sonic ${type}, error: ${error}`);
},
error: (error) => {
logger.warn(`Sonic ${type} error: ${error}`);
},
retrying: () => {
logger.info(`Sonic ${type} retrying`);
},
timeout: () => {
logger.warn(`Sonic ${type} timeout`);
},
});
const hasConfig =
config.sonic && (config.sonic.host || config.sonic.port || config.sonic.auth);
if (hasConfig) {
logger.info("Connecting to Sonic");
}
const host = hasConfig ? config.sonic.host ?? "localhost" : "";
const port = hasConfig ? config.sonic.port ?? 1491 : 0;
const auth = hasConfig ? config.sonic.auth ?? "SecretPassword" : "";
const collection = hasConfig ? config.sonic.collection ?? "main" : "";
const bucket = hasConfig ? config.sonic.bucket ?? "default" : "";
export default hasConfig
? {
search: new SonicChannel.Search({ host, port, auth }).connect(
handlers("search"),
),
ingest: new SonicChannel.Ingest({ host, port, auth }).connect(
handlers("ingest"),
),
collection,
bucket,
}
: null;

View file

@@ -13,7 +13,7 @@ import processDb from "./processors/db/index.js";
import processObjectStorage from "./processors/object-storage/index.js";
import processSystemQueue from "./processors/system/index.js";
import processWebhookDeliver from "./processors/webhook-deliver.js";
import processBackground from "./processors/background/index.js";
// import processBackground from "./processors/background/index.js";
import { endedPollNotification } from "./processors/ended-poll-notification.js";
import { queueLogger } from "./logger.js";
import { getJobInfo } from "./get-job-info.js";
@@ -482,13 +482,13 @@ export function createCleanRemoteFilesJob() {
);
}
export function createIndexAllNotesJob(data = {}) {
return backgroundQueue.add("indexAllNotes", data, {
removeOnComplete: true,
removeOnFail: false,
timeout: 1000 * 60 * 60 * 24,
});
}
// export function createIndexAllNotesJob(data = {}) {
// return backgroundQueue.add("indexAllNotes", data, {
// removeOnComplete: true,
// removeOnFail: false,
// timeout: 1000 * 60 * 60 * 24,
// });
// }
export function webhookDeliver(
webhook: Webhook,
@@ -526,7 +526,7 @@ export default function () {
webhookDeliverQueue.process(64, processWebhookDeliver);
processDb(dbQueue);
processObjectStorage(objectStorageQueue);
processBackground(backgroundQueue);
// processBackground(backgroundQueue);
systemQueue.add(
"cleanCharts",

View file

@@ -1,88 +0,0 @@
import type Bull from "bull";
import type { DoneCallback } from "bull";
import { queueLogger } from "../../logger.js";
import { Notes } from "@/models/index.js";
import { MoreThan } from "typeorm";
import { index } from "@/services/note/create.js";
import { Note } from "@/models/entities/note.js";
import meilisearch from "@/db/meilisearch.js";
const logger = queueLogger.createSubLogger("index-all-notes");
export default async function indexAllNotes(
job: Bull.Job<Record<string, unknown>>,
done: DoneCallback,
): Promise<void> {
logger.info("Indexing all notes...");
let cursor: string | null = (job.data.cursor as string) ?? null;
let indexedCount: number = (job.data.indexedCount as number) ?? 0;
let total: number = (job.data.total as number) ?? 0;
let running = true;
const take = 10000;
const batch = 100;
while (running) {
logger.info(
`Querying for ${take} notes ${indexedCount}/${
total ? total : "?"
} at ${cursor}`,
);
let notes: Note[] = [];
try {
notes = await Notes.find({
where: {
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: take,
order: {
id: 1,
},
relations: ["user"],
});
} catch (e: any) {
logger.error(`Failed to query notes ${e}`);
done(e);
break;
}
if (notes.length === 0) {
await job.progress(100);
running = false;
break;
}
try {
const count = await Notes.count();
total = count;
await job.update({ indexedCount, cursor, total });
} catch (e) {}
for (let i = 0; i < notes.length; i += batch) {
const chunk = notes.slice(i, i + batch);
if (meilisearch) {
await meilisearch.ingestNote(chunk);
}
await Promise.all(chunk.map((note) => index(note, true)));
indexedCount += chunk.length;
const pct = (indexedCount / total) * 100;
await job.update({ indexedCount, cursor, total });
await job.progress(+pct.toFixed(1));
logger.info(`Indexed notes ${indexedCount}/${total ? total : "?"}`);
}
cursor = notes[notes.length - 1].id;
await job.update({ indexedCount, cursor, total });
if (notes.length < take) {
running = false;
}
}
done();
logger.info("All notes have been indexed.");
}

View file

@@ -1,12 +0,0 @@
import type Bull from "bull";
import indexAllNotes from "./index-all-notes.js";
const jobs = {
indexAllNotes,
} as Record<string, Bull.ProcessCallbackFunction<Record<string, unknown>>>;
export default function (q: Bull.Queue) {
for (const [k, v] of Object.entries(jobs)) {
q.process(k, 16, v);
}
}

View file

@@ -7,7 +7,7 @@ import type { DriveFile } from "@/models/entities/drive-file.js";
import { MoreThan } from "typeorm";
import { deleteFileSync } from "@/services/drive/delete-file.js";
import { sendEmail } from "@/services/send-email.js";
import meilisearch from "@/db/meilisearch.js";
// import meilisearch from "@/db/meilisearch.js";
const logger = queueLogger.createSubLogger("delete-account");
@@ -42,9 +42,9 @@
cursor = notes[notes.length - 1].id;
await Notes.delete(notes.map((note) => note.id));
if (meilisearch) {
await meilisearch.deleteNotes(notes);
}
// if (meilisearch) {
// await meilisearch.deleteNotes(notes);
// }
}
logger.succ("All of notes deleted");

View file

@@ -1,5 +1,5 @@
import define from "@/server/api/define.js";
import { createIndexAllNotesJob } from "@/queue/index.js";
// import { createIndexAllNotesJob } from "@/queue/index.js";
export const meta = {
tags: ["admin"],
@@ -22,7 +22,7 @@
} as const;
export default define(meta, paramDef, async (ps, _me) => {
createIndexAllNotesJob({
cursor: ps.cursor ?? undefined,
});
// createIndexAllNotesJob({
// cursor: ps.cursor ?? undefined,
// });
});

View file

@@ -1,5 +1,5 @@
import { In } from "typeorm";
import { index } from "@/services/note/create.js";
// import { index } from "@/services/note/create.js";
import type { IRemoteUser, User } from "@/models/entities/user.js";
import {
Users,
@@ -625,7 +625,7 @@ export default define(meta, paramDef, async (ps, user) => {
}
if (publishing && user.isIndexable) {
index(note, true);
// index(note, true);
// Publish update event for the updated note details
publishNoteStream(note.id, "updated", {

View file

@@ -1,7 +1,7 @@
import * as os from "node:os";
import si from "systeminformation";
import define from "@/server/api/define.js";
import meilisearch from "@/db/meilisearch.js";
// import meilisearch from "@/db/meilisearch.js";
import { fetchMeta } from "@/misc/fetch-meta.js";
export const meta = {
@@ -64,14 +64,14 @@ export default define(meta, paramDef, async () => {
};
});
async function meilisearchStatus() {
if (meilisearch) {
return meilisearch.serverStats();
} else {
return {
health: "unconfigured",
size: 0,
indexed_count: 0,
};
}
}
// async function meilisearchStatus() {
// if (meilisearch) {
// return meilisearch.serverStats();
// } else {
// return {
// health: "unconfigured",
// size: 0,
// indexed_count: 0,
// };
// }
// }

View file

@@ -82,7 +82,7 @@ const nodeinfo2 = async () => {
disableRecommendedTimeline: meta.disableRecommendedTimeline,
disableGlobalTimeline: meta.disableGlobalTimeline,
emailRequiredForSignup: meta.emailRequiredForSignup,
searchFilters: config.meilisearch ? true : false,
searchFilters: false, // TODO: implement search filters
postEditing: true,
postImports: meta.experimentalFeatures?.postImports || false,
enableHcaptcha: meta.enableHcaptcha,

View file

@@ -1,6 +1,6 @@
import * as mfm from "mfm-js";
import es from "@/db/elasticsearch.js";
import sonic from "@/db/sonic.js";
// import es from "@/db/elasticsearch.js";
// import sonic from "@/db/sonic.js";
import {
publishMainStream,
publishNotesStream,
@@ -59,7 +59,7 @@ import type { UserProfile } from "@/models/entities/user-profile.js";
import { db } from "@/db/postgre.js";
import { getActiveWebhooks } from "@/misc/webhook-cache.js";
import { shouldSilenceInstance } from "@/misc/should-block-instance.js";
import meilisearch from "@/db/meilisearch.js";
// import meilisearch from "@/db/meilisearch.js";
import { redisClient } from "@/db/redis.js";
import { Mutex } from "redis-semaphore";
import { langmap } from "@/misc/langmap.js";
@@ -652,9 +652,9 @@
}
// Register to search database
if (user.isIndexable) {
await index(note, false);
}
// if (user.isIndexable) {
// await index(note, false);
// }
});
async function renderNoteOrRenoteActivity(data: Option, note: Note) {
@@ -810,39 +810,39 @@ async function insertNote(
}
}
export async function index(note: Note, reindexing: boolean): Promise<void> {
if (!note.text || note.visibility !== "public") return;
// export async function index(note: Note, reindexing: boolean): Promise<void> {
// if (!note.text || note.visibility !== "public") return;
if (config.elasticsearch && es) {
es.index({
index: config.elasticsearch.index || "misskey_note",
id: note.id.toString(),
body: {
text: normalizeForSearch(note.text),
userId: note.userId,
userHost: note.userHost,
},
});
}
// if (config.elasticsearch && es) {
// es.index({
// index: config.elasticsearch.index || "misskey_note",
// id: note.id.toString(),
// body: {
// text: normalizeForSearch(note.text),
// userId: note.userId,
// userHost: note.userHost,
// },
// });
// }
if (sonic) {
await sonic.ingest.push(
sonic.collection,
sonic.bucket,
JSON.stringify({
id: note.id,
userId: note.userId,
userHost: note.userHost,
channelId: note.channelId,
}),
note.text,
);
}
// if (sonic) {
// await sonic.ingest.push(
// sonic.collection,
// sonic.bucket,
// JSON.stringify({
// id: note.id,
// userId: note.userId,
// userHost: note.userHost,
// channelId: note.channelId,
// }),
// note.text,
// );
// }
if (meilisearch && !reindexing) {
await meilisearch.ingestNote(note);
}
}
// if (meilisearch && !reindexing) {
// await meilisearch.ingestNote(note);
// }
// }
async function notifyToWatchersOfRenotee(
renote: Note,

View file

@@ -16,7 +16,7 @@ import {
import { countSameRenotes } from "@/misc/count-same-renotes.js";
import { registerOrFetchInstanceDoc } from "@/services/register-or-fetch-instance-doc.js";
import { deliverToRelays } from "@/services/relay.js";
import meilisearch from "@/db/meilisearch.js";
// import meilisearch from "@/db/meilisearch.js";
/**
* Deletes a post.
@@ -118,9 +118,9 @@
});
}
if (meilisearch) {
await meilisearch.deleteNotes(note.id);
}
// if (meilisearch) {
// await meilisearch.deleteNotes(note.id);
// }
}
async function findCascadingNotes(note: Note) {

View file

@@ -30,7 +30,7 @@
</div>
</div>
<div class="_panel">
<!-- <div class="_panel">
<XPie class="pie" :value="meiliProgress" />
<div>
<p><i :class="icon('ph-file-search')"></i>MeiliSearch</p>
@@ -46,7 +46,7 @@
{{ meiliIndexCount }}
</p>
</div>
</div>
</div> -->
</div>
</div>
</template>
@@ -57,7 +57,7 @@ import XPie from "../../widgets/server-metric/pie.vue";
import bytes from "@/filters/bytes";
import { useStream } from "@/stream";
import * as os from "@/os";
import { i18n } from "@/i18n";
// import { i18n } from "@/i18n";
import icon from "@/scripts/icon";
const stream = useStream();
@@ -72,10 +72,10 @@ const memTotal = ref(0);
const memUsed = ref(0);
const memFree = ref(0);
const meiliProgress = ref(0);
const meiliTotalSize = ref(0);
const meiliIndexCount = ref(0);
const meiliAvailable = ref("unavailable");
// const meiliProgress = ref(0);
// const meiliTotalSize = ref(0);
// const meiliIndexCount = ref(0);
// const meiliAvailable = ref("unavailable");
const diskUsage = computed(() => meta.fs.used / meta.fs.total);
const diskTotal = computed(() => meta.fs.total);
@@ -90,10 +90,10 @@ function onStats(stats) {
memUsed.value = stats.mem.active;
memFree.value = memTotal.value - memUsed.value;
meiliTotalSize.value = stats.meilisearch.size;
meiliIndexCount.value = stats.meilisearch.indexed_count;
meiliAvailable.value = stats.meilisearch.health;
meiliProgress.value = meiliIndexCount.value / serverStats.notesCount;
// meiliTotalSize.value = stats.meilisearch.size;
// meiliIndexCount.value = stats.meilisearch.indexed_count;
// meiliAvailable.value = stats.meilisearch.health;
// meiliProgress.value = meiliIndexCount.value / serverStats.notesCount;
}
const connection = stream.useChannel("serverStats");

View file

@@ -1,26 +0,0 @@
import { i18n } from "@/i18n";
import * as os from "@/os";
export async function indexPosts() {
const { canceled, result: index } = await os.inputText({
title: i18n.ts.indexFrom,
text: i18n.ts.indexFromDescription,
});
if (canceled) return;
if (index == null || index === "") {
await os.api("admin/search/index-all");
await os.alert({
type: "info",
text: i18n.ts.indexNotice,
});
} else {
await os.api("admin/search/index-all", {
cursor: index,
});
await os.alert({
type: "info",
text: i18n.ts.indexNotice,
});
}
}

View file

@@ -46,13 +46,6 @@
:connection="connection"
:meta="meta"
/>
<XMeili
v-else-if="
instance.features.searchFilters && widgetProps.view === 5
"
:connection="connection"
:meta="meta"
/>
</div>
</MkContainer>
</template>
@@ -66,7 +59,6 @@ import XNet from "./net.vue";
import XCpu from "./cpu.vue";
import XMemory from "./mem.vue";
import XDisk from "./disk.vue";
import XMeili from "./meilisearch.vue";
import MkContainer from "@/components/MkContainer.vue";
import type { GetFormResultType } from "@/scripts/form";
import * as os from "@/os";

View file

@@ -1,86 +0,0 @@
<template>
<div class="verusivbr">
<XPie
v-tooltip="i18n.ts.meiliIndexCount"
class="pie"
:value="progress"
:reverse="true"
/>
<div>
<p><i :class="icon('ph-file-search')"></i>MeiliSearch</p>
<p>{{ i18n.ts._widgets.meiliStatus }}: {{ available }}</p>
<p>{{ i18n.ts._widgets.meiliSize }}: {{ bytes(totalSize, 1) }}</p>
<p>{{ i18n.ts._widgets.meiliIndexCount }}: {{ indexCount }}</p>
</div>
</div>
<br />
</template>
<script lang="ts" setup>
import { onBeforeUnmount, onMounted, ref } from "vue";
import XPie from "./pie.vue";
import bytes from "@/filters/bytes";
import { i18n } from "@/i18n";
import * as os from "@/os";
import icon from "@/scripts/icon";
const props = defineProps<{
connection: any;
meta: any;
}>();
const progress = ref<number>(0);
const serverStats = ref(null);
const totalSize = ref<number>(0);
const indexCount = ref<number>(0);
const available = ref<string>("unavailable");
function onStats(stats) {
totalSize.value = stats.meilisearch.size;
indexCount.value = stats.meilisearch.indexed_count;
available.value = stats.meilisearch.health;
progress.value = indexCount.value / serverStats.value.notesCount;
}
onMounted(() => {
os.api("stats", {}).then((res) => {
serverStats.value = res;
});
props.connection.on("stats", onStats);
});
onBeforeUnmount(() => {
props.connection.off("stats", onStats);
});
</script>
<style lang="scss" scoped>
.verusivbr {
display: flex;
padding: 16px;
> .pie {
height: 82px;
flex-shrink: 0;
margin-inline-end: 16px;
}
> div {
flex: 1;
> p {
margin: 0;
font-size: 0.8em;
&:first-child {
font-weight: bold;
margin-bottom: 4px;
> i {
margin-inline-end: 4px;
}
}
}
}
}
</style>