wikiDb

This commit is contained in:
parent ca8d26ccf8
commit c8f1a52b7c

zimmer.js (42 changed lines)
@@ -69,7 +69,7 @@ var srcPath
 var outPath
 var out // output file writer
 
-var indexerDb
+var wikiDb
 var dirQueue
 var clusterWriter
 
@@ -415,7 +415,7 @@ class Cluster {
 const offset = await out.write( Buffer.concat([ Buffer.from([ compression ]), data ]))
 
 log( 'Cluster saved', id, offset )
-return indexerDb.run(
+return wikiDb.run(
 'INSERT INTO clusters (id, offset) VALUES (?,?)',
 [
 id,
@@ -576,7 +576,7 @@ class Item {
 this.mimeId(),
 ]
 
-return indexerDb.run(
+return wikiDb.run(
 'INSERT INTO articles ( urlKey, titleKey, revision, mimeId ) VALUES ( ?,?,?,? )',
 row
 )
@@ -632,7 +632,7 @@ class Item {
 const id = await this.getId()
 try {
 log( 'saveDirEntryIndex', id, offset, this.path )
-return await indexerDb.run(
+return await wikiDb.run(
 'INSERT INTO dirEntries (id, offset) VALUES (?,?)',
 [
 id,
@@ -717,7 +717,7 @@ class Redirect extends Item {
 
 async saveRedirectIndex () {
 const id = await this.getId()
-return indexerDb.run(
+return wikiDb.run(
 'INSERT INTO redirects (id, targetKey, fragment) VALUES (?,?,?)',
 [
 id,
@@ -1085,9 +1085,9 @@ function fillInMetadata () {
 return Promise.all( done )
 }
 
-async function openMetadata( dbName ) {
-indexerDb = await sqlite.open( dbName )
-return indexerDb.exec(`
+async function openWikiDb( dbName ) {
+wikiDb = await sqlite.open( dbName )
+return wikiDb.exec(`
 PRAGMA synchronous = OFF;
 PRAGMA journal_mode = WAL;
 DROP INDEX IF EXISTS articleUrlKey ;
@@ -1110,7 +1110,7 @@ async function openMetadata( dbName ) {
 )
 }
 
-async function newMetadata() {
+async function newWikiDb() {
 var dbName = ''
 if ( argv.verbose ) {
 var parsed = osPath.parse( outPath )
@@ -1120,8 +1120,8 @@ async function newMetadata() {
 await fs.unlink( dbName )
 } catch ( err ) {
 }
-indexerDb = await sqlite.open( dbName )
-return indexerDb.exec(
+wikiDb = await sqlite.open( dbName )
+return wikiDb.exec(
 'PRAGMA synchronous = OFF;' +
 'PRAGMA journal_mode = OFF;' +
 //~ 'PRAGMA journal_mode = WAL;' +
@@ -1151,7 +1151,7 @@ async function newMetadata() {
 }
 
 function sortArticles () {
-return indexerDb.exec(`
+return wikiDb.exec(`
 CREATE INDEX articleUrlKey ON articles (urlKey);
 
 CREATE TABLE urlSorted AS
@@ -1191,7 +1191,7 @@ async function loadRedirects () {
 }
 
 async function resolveRedirects () {
-var stmt = await indexerDb.prepare( `
+var stmt = await wikiDb.prepare( `
 SELECT
 src.id AS id,
 src.urlKey AS urlKey,
@@ -1238,7 +1238,7 @@ async function saveIndex ( params ) {
 log( logInfo, 'start', params.count )
 
 var startOffset
-var stmt = await indexerDb.prepare( params.query )
+var stmt = await wikiDb.prepare( params.query )
 var i = 0
 for ( let row; row = await stmt.get(); i++ ) {
 log( logInfo, i, row )
@@ -1405,18 +1405,18 @@ async function initialise () {
 log( 'reserving space for header and mime type list' )
 await out.write( Buffer.alloc( headerLength + maxMimeLength ))
 
-var metadata = osPath.join( srcPath, 'metadata.db' )
-if ( await fs.exists( metadata )) {
+var dbPath = osPath.join( srcPath, 'metadata.db' )
+if ( await fs.exists( dbPath )) {
 preProcessed = true
 try {
 mainPage.urlKey = 'A' + ( await fs.readFile( osPath.join( srcPath, 'mainpage' ))).toString()
 } catch ( err ) {
 warning( 'mainpage error', err )
 }
-await openMetadata( metadata )
+await openWikiDb( dbPath )
 return loadMimeTypes()
 } else {
-await newMetadata()
+await newWikiDb()
 return fillInMetadata()
 }
 }
@@ -1452,7 +1452,7 @@ async function rawLoader () {
 }
 
 async function loadPreProcessedArticles () {
-var stmt = await indexerDb.prepare( `
+var stmt = await wikiDb.prepare( `
 SELECT
 id ,
 mimeId ,
@@ -1484,7 +1484,7 @@ async function loadPreProcessedArticles () {
 }
 
 async function loadMimeTypes () {
-var stmt = await indexerDb.prepare( `
+var stmt = await wikiDb.prepare( `
 SELECT
 id ,
 value
@@ -1516,7 +1516,7 @@ async function postProcess () {
 
 async function finalise () {
 header.checksumPos = await out.close() // close the output stream
-await indexerDb.close()
+await wikiDb.close()
 await storeHeader()
 return calculateFileHash()
 }
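Taken together, every sqlite handle reference in zimmer.js now goes through wikiDb instead of indexerDb, and the helpers openMetadata/newMetadata become openWikiDb/newWikiDb. A minimal sketch of the renamed pieces, assuming only the `sqlite` module interface already visible in the hunks above (`open`, `exec`, `run`); `saveClusterIndex` is an illustrative name, not a function from zimmer.js:

// Sketch only — mirrors the renamed helpers above, not the full zimmer.js.
const sqlite = require( 'sqlite' ) // assumed: same module zimmer.js uses for sqlite.open()

var wikiDb // replaces the former indexerDb handle

async function openWikiDb( dbName ) {
    wikiDb = await sqlite.open( dbName ) // same open call as before, new handle name
    return wikiDb.exec( 'PRAGMA synchronous = OFF;' )
}

// Query helpers now run through wikiDb, e.g. the cluster index insert from the Cluster hunk:
async function saveClusterIndex( id, offset ) {
    return wikiDb.run( 'INSERT INTO clusters (id, offset) VALUES (?,?)', [ id, offset ] )
}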