Retrieve latest headers and cookies #161
base: main
Changes from all commits: 729c12e, 7361b76, 376a11d, 5134f58, baa2bbc, 1560857, ce30c9c
@@ -4,6 +4,7 @@ import { RequestHistory, RequestProgress } from './rpc';
 import mutex from './mutex';
 import { minimatch } from 'minimatch';
 const charwise = require('charwise');
+import { safeParseJSON } from '../../utils/misc';

 export const db = new Level('./ext-db', {
   valueEncoding: 'json',
@@ -23,10 +24,10 @@ const pluginMetadataDb = db.sublevel<string, PluginMetadata>('pluginMetadata', {
 const connectionDb = db.sublevel<string, boolean>('connections', {
   valueEncoding: 'json',
 });
-const cookiesDb = db.sublevel<string, boolean>('cookies', {
+export const cookiesDb = db.sublevel<string, boolean>('cookies', {
   valueEncoding: 'json',
 });
-const headersDb = db.sublevel<string, boolean>('headers', {
+export const headersDb = db.sublevel<string, boolean>('headers', {
   valueEncoding: 'json',
 });
 const localStorageDb = db.sublevel<string, any>('sessionStorage', {
@@ -335,49 +336,40 @@ export async function setConnection(origin: string) {
   return true;
 }

-export async function setCookies(host: string, name: string, value: string) {
-  return mutex.runExclusive(async () => {
-    await cookiesDb.sublevel(host).put(name, value);
-    return true;
-  });
-}
-
-export async function clearCookies(host: string) {
+async function setValue(
+  db: typeof cookiesDb | typeof headersDb,
+  key: string,
+  name: string,
+  value: string,
+): Promise<boolean> {
   return mutex.runExclusive(async () => {
-    await cookiesDb.sublevel(host).clear();
+    const sublevel = db.sublevel(key);
+    const timestampSublevel = sublevel.sublevel('timestamp');
+    const timestamp = Date.now();
+
+    await Promise.all([
+      sublevel.put(name, value),
+      timestampSublevel.put(name, timestamp.toString()),
Review comment: Almost there! We do not care about timestamping each key; we only care about a timestamp per URL. Instead of timestamping here, you should timestamp whenever a request is intercepted. You can just add a method …
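A minimal sketch of what that per-URL timestamping could look like (the sublevel name, the setRequestTimestamp helper, and the interception hook are assumptions, not part of this PR; db and mutex are the module's existing instances):

// Hypothetical: store a single timestamp per URL, written when a request is intercepted.
const requestTimestampDb = db.sublevel<string, string>('requestTimestamps', {
  valueEncoding: 'json',
});

export async function setRequestTimestamp(url: string): Promise<boolean> {
  return mutex.runExclusive(async () => {
    await requestTimestampDb.put(url, Date.now().toString());
    return true;
  });
}

// Called from the request interception handler rather than from setValue, e.g.:
//   await setRequestTimestamp(request.url);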
+    ]);
     return true;
   });
 }

-export async function getCookies(link: string, name: string) {
-  try {
-    const existing = await cookiesDb.sublevel(link).get(name);
-    return existing;
-  } catch (e) {
-    return null;
-  }
+export async function setCookies(
+  host: string,
+  name: string,
+  value: string,
+): Promise<boolean> {
+  return setValue(cookiesDb, host, name, value);
 }

-export async function getCookiesByHost(link: string) {
-  const ret: { [key: string]: string } = {};
-  const links: { [k: string]: boolean } = {};
-  const url = urlify(link);
-
-  for await (const sublevel of cookiesDb.keys({ keyEncoding: 'utf8' })) {
-    const l = sublevel.split('!')[1];
-    links[l] = true;
-  }
-
-  const cookieLink = url
-    ? Object.keys(links).filter((l) => minimatch(l, link))[0]
-    : Object.keys(links).filter((l) => urlify(l)?.host === link)[0];
-
-  if (!cookieLink) return ret;
-
-  for await (const [key, value] of cookiesDb.sublevel(cookieLink).iterator()) {
-    ret[key] = value;
-  }
-  return ret;
+export async function setHeaders(
+  link: string,
+  name: string,
+  value?: string,
+): Promise<boolean | null> {
+  if (!value) return null;
+  return setValue(headersDb, link, name, value);
 }

 export async function deleteConnection(origin: string) {
@@ -397,49 +389,74 @@ export async function getConnection(origin: string) {
   }
 }

-export async function setHeaders(link: string, name: string, value?: string) {
-  if (!value) return null;
-  return mutex.runExclusive(async () => {
-    await headersDb.sublevel(link).put(name, value);
-    return true;
-  });
-}
-
-export async function clearHeaders(host: string) {
-  return mutex.runExclusive(async () => {
-    await headersDb.sublevel(host).clear();
-    return true;
-  });
-}
-
-export async function getHeaders(host: string, name: string) {
+function parseValue(rawValue: string): string {
   try {
-    const existing = await headersDb.sublevel(host).get(name);
-    return existing;
-  } catch (e) {
-    return null;
+    const parsed = safeParseJSON(rawValue);
+    return parsed && typeof parsed === 'object' && 'value' in parsed
+      ? parsed.value
+      : rawValue;
+  } catch {
+    return rawValue;
   }
 }

-export async function getHeadersByHost(link: string) {
-  const ret: { [key: string]: string } = {};
-  const url = urlify(link);
+async function getValuesByHost(
+  db: typeof cookiesDb | typeof headersDb,
+  link: string,
+  type: 'cookie' | 'header',
+): Promise<{ [key: string]: string }> {
+  const ret: { [key: string]: { value: string; timestamp: number } } = {};
   const links: { [k: string]: boolean } = {};
-  for await (const sublevel of headersDb.keys({ keyEncoding: 'utf8' })) {
-    const l = sublevel.split('!')[1];
+  const url = urlify(link);
+
+  for await (const sublevel of db.keys({ keyEncoding: 'utf8' })) {
+    const l = sublevel.split('!')[1] || sublevel;
     links[l] = true;
   }

-  const headerLink = url
-    ? Object.keys(links).filter((l) => minimatch(l, link))[0]
+  const matchedLink = url
+    ? Object.keys(links).filter((l) => l === link || minimatch(l, link))[0]
Review comment: Instead of returning the first filtered result, return the MOST RECENT filtered result based on the URL timestamp.
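One way to address this, sketched under the assumption that per-URL timestamps are recorded as suggested above (the getRequestTimestamp parameter is hypothetical, not part of this PR):

// Hypothetical: rank matching links by their per-URL timestamp and take the newest.
async function pickMostRecentLink(
  candidates: string[],
  getRequestTimestamp: (url: string) => Promise<number>,
): Promise<string | undefined> {
  const ranked = await Promise.all(
    candidates.map(async (l) => ({ link: l, ts: await getRequestTimestamp(l) })),
  );
  ranked.sort((a, b) => b.ts - a.ts); // newest first
  return ranked[0]?.link;
}

// Illustrative use inside getValuesByHost:
//   const candidates = Object.keys(links).filter((l) => l === link || minimatch(l, link));
//   const matchedLink = await pickMostRecentLink(candidates, getRequestTimestamp);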
     : Object.keys(links).filter((l) => urlify(l)?.host === link)[0];

-  if (!headerLink) return ret;
+  if (!matchedLink) return {};

-  for await (const [key, value] of headersDb.sublevel(headerLink).iterator()) {
-    ret[key] = value;
+  const sublevel = db.sublevel(matchedLink);
+  const timestampSublevel = sublevel.sublevel('timestamp');
+
+  for await (const [key, rawValue] of sublevel.iterator({
Review comment: This is not needed.
+    valueEncoding: 'utf8',
+  })) {
+    if (key === 'timestamp' || key.startsWith('!timestamp!')) continue;
+
+    const timestamp =
+      parseInt(await timestampSublevel.get(key).catch(() => '0'), 10) || 0;
+    const value = parseValue(rawValue);
+
+    if (!ret[key] || timestamp > ret[key].timestamp) {
+      ret[key] = { value, timestamp };
+    }
   }
-  return ret;
+
+  return Object.fromEntries(Object.entries(ret).map(([k, v]) => [k, v.value]));
 }

+export async function getCookiesByHost(
+  link: string,
+): Promise<{ [key: string]: string }> {
+  return getValuesByHost(cookiesDb, link, 'cookie');
+}
+
+export async function getHeadersByHost(
+  link: string,
+): Promise<{ [key: string]: string }> {
+  return getValuesByHost(headersDb, link, 'header');
+}
+
 export async function setLocalStorage(
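For context, a usage sketch of the refactored read/write API (the calling code, the import path, and the placeholder values are illustrative, not part of this diff):

import { getCookiesByHost, getHeadersByHost, setCookies, setHeaders } from './db';

async function snapshotForHost(url: string) {
  // Writers store name/value pairs under a per-URL sublevel via setValue.
  await setHeaders(url, 'authorization', 'Bearer <token>');
  await setCookies(url, 'session', 'abc123');

  // Readers resolve the matching sublevel and return flat name -> value maps.
  const headers = await getHeadersByHost(url);
  const cookies = await getCookiesByHost(url);
  return { headers, cookies };
}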