-
-
Notifications
You must be signed in to change notification settings - Fork 37
/
init-items.ts
168 lines (161 loc) · 4.89 KB
/
init-items.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
import {
DBIndex,
DBMeta,
ExpiredValue,
FileInfo,
Item,
RepoMetaOverride,
RunOptions,
Source,
} from "./interface.ts";
import renderMarkdown from "./render-markdown.ts";
import Github from "./adapters/github.ts";
import {
exists,
getCachePath,
getDayNumber,
getWeekNumber,
readTextFile,
sha1,
} from "./util.ts";
import log from "./log.ts";
import { fs, path } from "./deps.ts";
import parser from "./parser/mod.ts";
import getGitBlame from "./get-git-blame.ts";
import { updateFile, updateItems } from "./db.ts";
/**
 * Initialize items for one source: fetch repo metadata from the GitHub API,
 * clone (or update) the repository into the local cache, parse every
 * configured file into items, and persist files/items to the db.
 *
 * @param source source config (identifier, files map, optional default_branch)
 * @param options run options; `fetchRepoUpdates` gates the `git pull` on an
 *   existing cached clone
 * @param dbMeta mutable db metadata; `dbMeta.sources[source.identifier]` is
 *   created/updated in place
 * @param dbIndex db index, passed through to `updateItems`
 * @param dbCachedStars star-count cache, passed through to the parser
 * @throws Error when `git clone` fails, or when a parsed item line has no
 *   blame (commit) info
 */
export default async function initItems(
  source: Source,
  options: RunOptions,
  dbMeta: DBMeta,
  dbIndex: DBIndex,
  dbCachedStars: Record<string, ExpiredValue>,
) {
  // first get repo meta info from api
  const api = new Github(source);
  const metaOverrides: RepoMetaOverride = {};
  if (source.default_branch) {
    metaOverrides.default_branch = source.default_branch;
  }
  const meta = await api.getRepoMeta(metaOverrides);
  const sources = dbMeta.sources;
  // check whether the repo is already cached on disk
  const repoPath = path.join(getCachePath(false), "repos", source.identifier);
  const isExist = await exists(repoPath);
  // then git clone the entire repo (or pull updates), and parse the files
  if (isExist) {
    log.debug(`repo ${repoPath} exist cache, try to pull updates`);
    // try to update the existing clone
    if (options.fetchRepoUpdates) {
      const args: string[] = [
        "--work-tree",
        repoPath,
        "--git-dir",
        path.join(repoPath, ".git"),
      ];
      const p = Deno.run({
        cmd: ["git"].concat(args).concat(["pull"]),
      });
      const status = await p.status();
      // close the process to avoid leaking the subprocess resource
      p.close();
      if (!status.success) {
        // pull is best-effort: fall back to the cached checkout, but say so
        log.warn(
          `git pull failed for ${source.identifier} (exit code ${status.code}), using cached checkout`,
        );
      }
    }
  } else {
    // ensure parent folder exists
    await fs.ensureDir(path.dirname(repoPath));
    log.info(`cloning ${api.getCloneUrl()} to ${repoPath}`);
    // try to clone
    const p = Deno.run({
      cmd: [
        "git",
        "clone",
        "-b",
        meta.default_branch,
        api.getCloneUrl(),
        repoPath,
      ],
    });
    const status = await p.status();
    // close the process to avoid leaking the subprocess resource
    p.close();
    if (!status.success) {
      // without a checkout nothing below can work; fail fast with context
      throw new Error(
        `git clone failed for ${source.identifier} (exit code ${status.code})`,
      );
    }
  }
  const now = new Date();
  // first run for this source: seed its meta entry
  sources[source.identifier] = sources[source.identifier] || {
    created_at: now.toISOString(),
    updated_at: now.toISOString(),
    meta,
    files: {},
  };
  for (const file of Object.keys(source.files)) {
    const fileConfig = source.files[file];
    // per-line blame info drives each item's updated_at timestamp
    const blameInfoMap = await getGitBlame(file, {
      workTree: repoPath,
      gitDir: path.join(repoPath, ".git"),
    });
    const items: Record<string, Item> = {};
    const cachedFilePath = path.join(repoPath, file);
    const content = await readTextFile(cachedFilePath);
    const fileInfo: FileInfo = {
      sourceConfig: source,
      sourceMeta: sources[source.identifier],
      filepath: file,
    };
    const docItems = await parser(content, fileInfo, dbCachedStars);
    // track the newest commit date across all items in this file
    let latestUpdatedAt = new Date(0);
    for (const docItem of docItems) {
      const now = new Date();
      const commitInfo = blameInfoMap.get(docItem.line);
      if (commitInfo) {
        // items are keyed by the sha1 of their raw markdown
        const itemSha1 = await sha1(docItem.rawMarkdown);
        const commitTime = commitInfo.committerTime;
        // committerTime is a unix timestamp in seconds
        const commitDate = new Date(Number(commitTime) * 1000);
        const updatedAt = commitDate.toISOString();
        items[itemSha1] = {
          category: docItem.category,
          category_html: renderMarkdown(docItem.category),
          updated_at: updatedAt,
          source_identifier: source.identifier,
          file,
          markdown: docItem.formatedMarkdown,
          html: renderMarkdown(docItem.formatedMarkdown),
          sha1: itemSha1,
          checked_at: now.toISOString(),
          updated_day: getDayNumber(new Date(updatedAt)),
          updated_week: getWeekNumber(new Date(updatedAt)),
        };
        if (commitDate.getTime() > latestUpdatedAt.getTime()) {
          latestUpdatedAt = commitDate;
        }
      } else {
        throw new Error(
          `no commit info for ${source.identifier} ${file} ${docItem.line}`,
        );
      }
    }
    const contentSha1 = await sha1(content);
    // the file's created_at is the oldest commit seen in its blame info
    let createdAt = now;
    for (const blame of blameInfoMap.values()) {
      const commitTime = blame.committerTime;
      const commitDate = new Date(Number(commitTime) * 1000);
      if (commitDate < createdAt) {
        createdAt = commitDate;
      }
    }
    sources[source.identifier].files[file] = {
      sha1: contentSha1,
      updated_at: latestUpdatedAt.toISOString(),
      meta_created_at: now.toISOString(),
      created_at: createdAt.toISOString(),
      checked_at: now.toISOString(),
    };
    // write file content and parsed items to db
    await updateFile(fileInfo, content);
    await updateItems(fileInfo, items, dbIndex);
    log.info(
      `init ${source.identifier}/${file} success, total ${
        Object.keys(items).length
      } items`,
    );
  }
  dbMeta.sources = sources;
}