Create repo

This commit is contained in:
Levi Roberts
2022-12-23 17:30:48 -06:00
parent d76dcf2c72
commit c2014ce045
12 changed files with 2112 additions and 0 deletions

183
.gitignore vendored Normal file
View File

@@ -0,0 +1,183 @@
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,node
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,linux,node
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,node
# Custom rules (everything added below won't be overridden by 'Generate .gitignore File' if you use 'Update' option)
backup/*
data/*

28
package.json Normal file
View File

@@ -0,0 +1,28 @@
{
"name": "graphql-yoga",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"dev": "cross-env NODE_ENV=development ts-node-dev --exit-child --respawn src/main.ts",
"start": "ts-node src/main.ts"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/node": "18.7.19",
"cross-env": "7.0.3",
"ts-node": "10.9.1",
"ts-node-dev": "2.0.0",
"typescript": "4.8.3"
},
"dependencies": {
"@graphql-tools/schema": "^9.0.4",
"@graphql-yoga/node": "^2.13.13",
"better-sqlite3": "^7.6.2",
"fastq": "^1.13.0",
"graphql": "^16.6.0",
"phin": "^3.6.1"
}
}

1053
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

56
schema.graphql Normal file
View File

@@ -0,0 +1,56 @@
# Root reads: point-in-time snapshots of cache stats and current activity.
type Query {
  stats: Stats
  active: [GameInfo]
  # Resolve a cache access-log URL to game/depot metadata for the given service.
  lookup(service: String!, url: String!): GameInfo
}
# Push variants of the Query fields above (same payload shapes).
type Subscription {
  active: [GameInfo]
  stats: Stats
}
# Lifetime request counters for the cache.
type Stats {
  totalReqs: Int,
  totalHits: Int,
  totalMisses: Int,
}
# NOTE(review): appears to be a legacy shape kept while migrating to GameInfo —
# confirm it has no remaining consumers before removing.
type GameInfoOld {
  id: Int!
  """Service provider, e.g. Steam, Epic, etc"""
  service: String
  """Type of resource"""
  type: GameType
  name: String
  parentID: Int
  parent: GameInfo
  depots: [GameInfo]
}
type GameInfo {
  id: Int!
  service: String
  """Type of resource"""
  type: GameType
  client: String
  name: String
  content: String
  parent: GameInfo
}
enum GameType {
  APP
  GAME
  DEPOT
  DLC
  UNKNOWN
}

View File

@@ -0,0 +1,19 @@
'use strict'
module.exports = class genericLookupClient {
static parseID(url) {
// Legacy logs
url = url.replace('https://', '')
url = url.replace('http://', '')
const depotID = url
// TODO: hash url for unique ID
return depotID
}
static normalize(chunkData) {}
async getInfo() {
throw new Error('Unknown Service; Not implemented yet')
}
}

102
src/clients/steam/index.js Normal file
View File

@@ -0,0 +1,102 @@
'use strict'
const phin = require('phin')
const serviceURL = 'http://api.object.media:3001' // TODO: Load from .env file
module.exports = class steamLookupClient {
static parseID(url) {
// Example: '/depot/322331/chunk/XXX'
// Example: '/depot/322331/manifest/XXX/X/XXX'
// Legacy logs
url = url.replace('https://', '')
url = url.replace('http://', '')
const urlSections = url.split('/')
const depotID = urlSections[2]
return depotID
}
constructor(depotID) {
this.id = depotID
this.service = "steam"
}
// static login(), and instance reuseLogin()
static normalize(chunkData) {
if (chunkData.parent)
chunkData.parent = chunkData.parent?.id
return chunkData
}
async getInfo() {
console.log('Looking up game:', this.id)
try {
const url = new URL(`${serviceURL}/api`)
url.searchParams.append(
'query',
/* GraphQL */ `
query {
lookup(id: ${this.id}, service: "steam") {
id
service # Needed? We already know service.
type
name
# If we are a depot, we want the parent and all it's depots for faster lookups later
parent {
id
type
name
depots { # TODO: Implement @stream, since we don't necessarily need all the depots right away
id
type
name
}
}
# If we are a game, we want all it's depots for faster lookups later
depots { # TODO: Implement @stream, since we don't necessarily need all the depots right away
id
service # Needed? We already know service.
type
name
}
}
}
`,
)
// TODO: add depots to list, cuz right now it's only retrieving app ID as first fetch. Then it requires another fetch for
// depots that it sees.
const res = await phin({ url: url.toString(), method: 'GET', parse: 'json' })
return res.body?.data?.lookup
} catch(e) {
console.log('metadataAPI failed:')
console.log(e)
}
return null
}
}
// TODO: implement GQL/fetch cache with TTL for 30 minutes.
// TODO: FIXME: proton lookups, not in global steam API list. 1580130
// TODO: Set a hard limit on depots streamed to 20 for parent on metadata API. (MAYBE) 30 for game depots lookup; need to think through repercussions.
// Could sort the depots by date, so newest are always streamed

150
src/lib/chunkInfo.js Normal file
View File

@@ -0,0 +1,150 @@
'use strict'
const fastq = require('fastq')
let fq
const db = require('./database.js')
// TODO: move dbSchema into its own file.
// Column schema / merge rules used when writing lookup results to sqlite.
// Values are either a type tag string or a merge function (oldValue, newValue).
const dbTransformer = {
  // Pre-serialization hook: top-level games are rooted with parent = 0.
  init: (data, dbObj) => {
    if (dbObj.type === 'GAME')
      data.parent = 0
  },
  id: 'number',
  // Never clobber a previously-known name with the "Unknown Depot" placeholder.
  name: (previous, incoming) => {
    if (incoming === 'Unknown Depot' && previous && previous !== incoming)
      return previous
    return incoming
  },
  type: 'string',
  extra: 'string',
  // First write wins; conflicting parents collapse to the 'MULTI' marker
  // (and stay MULTI once set).
  parent: (previous, incoming) => {
    if (!previous)
      return incoming
    return previous === incoming ? incoming : 'MULTI'
  }, //'string',
  //depot: Set - depots should be serialized/deserialized to/from Sets
}
const clients = {
generic: require('../clients/generic/index.js'),
steam: require('../clients/steam/index.js'),
}
class chunkInfo {
  // Create one DB table per registered lookup client (table name = client
  // name, columns = dbTransformer keys).
  static init() {
    for (let client of Object.keys(clients)) {
      db.prepare(client, dbTransformer)
    }
  }
  // Queue a lookup of `url` via the named client. All lookups are funneled
  // through a single-concurrency fastq queue (created lazily on first use),
  // so upstream services see at most one request at a time.
  // Returns a promise for the chunk data produced by processItem().
  static async process(client, url) {
    // Initialize fast queue
    if (!fq)
      fq = fastq.promise(this.processItem, 1)
    return fq.push({ client, url })
  }
  // Queue worker: resolve a client, parse the chunk ID from the URL, and
  // fetch the chunk's metadata. Runs one job at a time (see process()).
  static async processItem(args) {
    const { client, url } = args
    // Resolve the lookup client and parse an ID
    const lookupClient = useClient(client)
    const chunkID = lookupClient.parseID(url)
    // Return item from DB if we have it
    //const depotItem = db.get(client, 'id', chunkID)
    //if (depotItem)
    //  return depotItem
    // FIXME: Resolve refs for this db section. Perhaps a better way would be to serialize/deserialize for the DB..
    // FIXME: Re-enable DB cache once done testing meta api.
    // Use client lookup to lookup item by ID
    const lclient = new lookupClient(chunkID)
    let chunkData
    try {
      chunkData = await lclient.getInfo()
    } catch (err) {
      // Lookup failures are logged, not rethrown; the stub path below still
      // returns an object carrying the parsed ID.
      console.error(`Lookup client '${client}' had an error:`, err)
    }
    // TODO: Turn into GQL error
    if (chunkData?.errors)
      throw new Error(chunkData.errors[0].message)
    // If chunkData doesn't exist, stub it for normalization
    if (!chunkData || typeof chunkData !== 'object')
      chunkData = {}
    chunkData.id = chunkID
    //console.log(chunkData)
    // NOTE(review): early return while the metadata API is under test —
    // everything below (normalize + DB persistence) is currently DEAD code.
    // See the "Re-enable DB cache" FIXME above before deleting or reviving it.
    return chunkData
    // Normalize data and save to DB
    const normalizedChunkData = lookupClient.normalize(chunkData)
    console.log(normalizedChunkData)
    //return normalizedChunkData // FIXME: Remove after testing
    // TODO: insert into DB with a timeout, so that items will resolve after an hour or so. How does this affect other depot stuffs?
    // Need to rethink this part. Right now, inserting it into DB is a way to stop hitting lookup clients.
    db.set(dbTransformer, client, normalizedChunkData)
    // TODO: Use gameUpdateCheck() idea.
    // Once a day, we can phone home and retrieve lastBuildNumber and check for app updates that are in our local DB.
    // This feature will be opt-in, because many might not like it.
    // This sends GAME items with build number and checks the diff on meta API.
    // Process child depots
    /* FIXME: Make this an array.
    const depotRefs = normalizedChunkData.depots || {}
    for (let childDepots of Object.keys(depotRefs)) {
      const childDepot = depotRefs[childDepots]
      // Create child depot in DB
      db.set(dbTransformer, client, childDepot)
    }*/
    // TODO: If we have a parent, go ahead and run it through processItem() too.
    return normalizedChunkData
  }
}
// Resolve a lookup client by name; unknown services are logged and fall
// back to the generic client rather than failing the whole pipeline.
function useClient(client) {
  const resolved = clients[client]
  if (resolved)
    return resolved
  console.error(`Error: Lookup client '${client}' does not exist! Falling back to generic client.`)
  return clients.generic
}
module.exports = chunkInfo

122
src/lib/database.js Normal file
View File

@@ -0,0 +1,122 @@
'use strict'
const sqlite3 = require('better-sqlite3')
const db = new sqlite3('data/gameinfo.db', /*{ verbose: console.log }*/)
module.exports = class database {
static prepare(table, tableKeys) {
if (typeof tableKeys !== 'object')
throw new Error('Invalid tableKeys passed to database.prepare')
let tableKeysStr = ''
let index = 0
for (let tableKey of Object.keys(tableKeys)) {
if (tableKey == 'init')
continue
tableKeysStr += (index > 0) ? ', ' + tableKey : tableKey
index++
}
// TODO: Object.keys(tableKeys).join(', ')
const stmt = db.prepare(`CREATE TABLE IF NOT EXISTS ${table} (${tableKeysStr})`)
stmt.run()
}
static get(table, keyName, keyValue) {
const stmt = db.prepare(`SELECT * FROM ${table} WHERE ${keyName} = ?`)
const res = stmt.get(keyValue)
return res
}
static has(table, keyName, keyValue) {
const res = this.get(table, keyName, keyValue)
// Because it may return undefined or null for specific value
if (!res)
return false
return true
}
static serialize(transformer, data, dbObj) {
// Call init function on transformer if it exists, for transformations.
if (typeof transformer.init === 'function')
transformer.init(data, dbObj)
// Only serialize stuff in the transformer
const returnObj = {}
for (let transformKey of Object.keys(transformer)) {
// If no data to operate on, skip
if (typeof data[transformKey] === 'undefined' || typeof data[transformKey] === 'null')
continue
const transformValue = transformer[transformKey]
if (typeof transformValue === 'function') {
//console.log(schemaKey, 'is function')
const fnResult = transformValue(dbObj[transformKey], data[transformKey], dbObj)
returnObj[transformKey] = fnResult
continue
}
if (typeof transformValue === 'object') {
if (typeof transformValue.serialize === 'function') {
const fnResult = transformValue.serialize(dbObj[transformKey], data[transformKey], dbObj)
returnObj[transformKey] = fnResult
continue
}
if (typeof transformValue.deserialize === 'function') {
const fnResult = transformValue.serialize(dbObj[transformKey], data[transformKey], dbObj)
returnObj[transformKey] = fnResult
continue
}
}
returnObj[transformKey] = data[transformKey]
}
// Only update items that have changed.
for (let returnObjKeys of Object.keys(returnObj)) {
if (returnObj[returnObjKeys] == dbObj[returnObjKeys])
delete returnObj[returnObjKeys]
}
return returnObj
}
static set(transformer, table, data) {
const dbObj = this.get(table, 'id', data.id)
// Item exists in the DB already
if (dbObj) {
const serializedObj = this.serialize(transformer, data, dbObj)
const keys = Object.keys(serializedObj)
// Nothing to update
if (keys.length === 0)
return //console.warn(`UPDATE ${table} WHERE id = '${data.id}' - Nothing to update`)
const insert = db.prepare(`UPDATE ${table} SET ${keys.map(x => x + " = ?").join(", ")} WHERE id = '${data.id}'`)
insert.run(...Object.values(serializedObj))
} else {
const serializedObj = this.serialize(transformer, data, {})
const keys = Object.keys(serializedObj)
// Nothing to insert
if (keys.length === 0)
return console.warn(`INSERT INTO ${table} for '${data.id}' - Nothing to insert`)
const insert = db.prepare(`INSERT INTO ${table} (${keys.join(", ")}) VALUES (${"?, ".repeat(keys.length).slice(0, -2)})`)
insert.run(...Object.values(serializedObj))
}
}
}
// TODO: Add typeof checks, against schema
// TODO: add .serialize and .deserialize

84
src/lib/logtail.js Normal file
View File

@@ -0,0 +1,84 @@
'use strict'
const showDebug = process.env.DEBUG || true // Show debug info
const EventEmitter = require('events')
const { spawn } = require('child_process')
//const { spawn } = require('bun-utilities/spawn')
// https://nodejs.org/api/readline.html#readline
const readline = require('node:readline/promises')
class logtail extends EventEmitter {
  /**
   * Follows (or bulk-reads) a log file and emits:
   *   'item'  - parsed field groups for each matching log line
   *   'close' - underlying process exited cleanly
   *   'error' - underlying process exited non-zero
   * @param {string} logFile - Path of the log to read.
   * @param {boolean} prefill - true: read the whole file once (`cat`);
   *                            false: follow new lines only (`tail -f`).
   */
  constructor(logFile, prefill) {
    super()
    this.logFile = logFile
    this.prefill = prefill
    this.spawn = {} // NOTE(review): appears unused; confirm before removing
  }
  // Spawn the reader process and stream its stdout line-by-line into
  // processLine(). Resolves when the stream ends.
  async start() {
    showDebug && console.log('Creating logtail instance:', this.logFile)
    // TODO: path, check if this.logFile exists, if not fail with error.
    // Log file does not exist. Can not read log file
    // TODO: check existence of `this.prefill` to determine cmd and cmdOpts, cat or tail
    // FIXME: Instead of this, what about an "Import" feature on UI in settings. The txt file will be uploaded then processed.
    // Replicate code as logimport.js
    // Or rename `prefill` to be `import`, because most of the code should stay the same...
    // TODO: Think about how prefill performance would be with the event emitter attached to every line... it's needed tho for `stats` but not
    // necessarily for pubSub.. actually wrapping another logtail instance for the UI feature could be done.. then handle the stats page, etc from there.
    let cmd, cmdOpts
    if (this.prefill) {
      cmd = 'cat'
      cmdOpts = [this.logFile]
    } else {
      cmd = 'tail'
      // FIX: pass '0' as a string — child_process argument arrays expect
      // strings; numeric 0 relied on implicit coercion at best.
      cmdOpts = ['-f', this.logFile, '--lines', '0']
    }
    const child = spawn(cmd, cmdOpts)
    this.child = child
    let rl
    child.on('close', code => {
      // FIX: always release the readline interface (previously only on clean
      // exit) so child.stdout doesn't keep the loop below alive after exit.
      if (rl && typeof rl.close === 'function')
        rl.close()
      if (code === 0) {
        this.emit('close')
      } else {
        // FIX: emit a real Error — 'error' listeners (and Node's default
        // throw-on-unhandled-'error') expect an Error object, not a string.
        this.emit('error', new Error('Process unexpectedly quit, code: ' + code))
        console.error('Child process closed, code:', code)
      }
    })
    rl = readline.createInterface({
      input: child.stdout,
      output: null,
    })
    for await (const line of rl) {
      processLine.call(this, line)
    }
  }
}
// Parse one raw access-log line and emit its captured named groups as an
// 'item' event. Non-matching lines are logged (when debugging) and dropped.
// Invoked with `this` bound to the logtail instance.
function processLine(line) {
  if (typeof line !== 'string')
    return
  const logPattern = /^\[(?<service>[A-Za-z0-9_.]+)\]\s(?<client>[0-9.]+).+?(?=\[)\[(?<timestamp>.+?(?=\]))\]\s"(?<method>[A-Z]+)\s(?<url>\S+).+?(?=")"\s(?<statusCode>[0-9]+)\s(?<size>[0-9]+)\s"-"\s"(.*?)"\s"(?<cache>[A-Z]+)?/
  const parsed = logPattern.exec(line)
  if (parsed?.groups) {
    this.emit('item', parsed.groups)
    return
  }
  showDebug && console.log(`WARN: Unhandled log Event: ${line}`)
}
module.exports = logtail

96
src/main.ts Normal file
View File

@@ -0,0 +1,96 @@
import { createServer, createPubSub } from '@graphql-yoga/node'
import * as fs from 'fs'
import * as path from 'path'
import logtail from './lib/logtail.js'
import chunkInfo from './lib/chunkInfo.js'
import stats from './stats.js'
// Shared pubSub bus: resolvers publish/subscribe via GraphQL context.
const pubSub = createPubSub()
// FIXME: Use a .env file + docker environment variable for log file location
const logFileLocation = '/run/user/1000/gvfs/sftp:host=10.0.1.101,user=root/mnt/user/appdata/lancache/logs/access.log'
// prefill=false: follow mode — only newly appended log lines are processed.
const tailInstance = new logtail(logFileLocation, false)
// Create the per-client SQLite tables up front.
chunkInfo.init()
// stats.init(pubSub)
// GraphQL SDL lives next to the project root in schema.graphql.
const typeDefs = fs.readFileSync(path.join(process.cwd(), "schema.graphql"), {
  encoding: 'utf-8',
})
// Factory for a pass-through Subscription resolver: subscribes to the named
// pubSub topic (from GraphQL context) and forwards each payload unchanged.
function newPubSub(name: string) {
  const subscribe = (_, {}, { pubSub }) => pubSub.subscribe(name)
  const resolve = (payload) => payload
  return { subscribe, resolve }
}
// Root resolver map: Query reads delegate to the stats/chunkInfo modules,
// Subscription fields forward pubSub topics via newPubSub().
const resolvers = {
  Query: {
    stats: () => stats.getStats(),
    active: () => stats.getActive(),
    // top, bySize
    lookup: async (_, { service, url }) => {
      const chunk = await chunkInfo.process(service, url)
      const parentInfo = chunk.parent ? `-> ${chunk.parent.name} (${Number(chunk.parent.id)})` : ''
      console.log(`${chunk.name} (`, Number(chunk.id), ')', parentInfo)
      return chunk
    }
  },
  Subscription: {
    stats: newPubSub('stats'),
    active: newPubSub('active'),
    //top: newPubSub('top'),
    //bySize: newPubSub('bySize'),
  }
}
/* TODO: Add type checking to pubsub
const pubSub = createPubSub<{
  data: [payload: any]
}>()
*/
// Yoga HTTP server: GraphQL endpoint at /api on port 3002; the shared pubSub
// instance is handed to resolvers via context.
const server = createServer({
  endpoint: '/api',
  port: 3002,
  logging: true,
  schema: {
    typeDefs,
    resolvers,
  },
  context: {
    pubSub,
  },
})
// One parsed access-log line at a time: resolve the game it belongs to, then
// push refreshed stats/active lists to subscribers.
// NOTE(review): async handler on an EventEmitter — a rejection here becomes an
// unhandled promise rejection; consider wrapping the body in try/catch.
tailInstance.on('item', async (logItem) => {
  // Don't handle lancache heartbeats. We may use this in the future for something.
  if (logItem.url === '/lancache-heartbeat')
    return
  // Process chunk
  const gameInfo = await chunkInfo.process(logItem.service, logItem.url)
  pubSub.publish('stats', stats.addRequest(gameInfo, logItem))
  const activeGames = stats.addActive(gameInfo, logItem)
  pubSub.publish('active', activeGames)
  console.log(`${gameInfo.name} (`, Number(gameInfo.id), ')', (gameInfo.parent ? `-> ${gameInfo.parent.name} (${Number(gameInfo.parent.id)})` : ''))
})
// Re-publish the active list every 30s so subscribers observe timeout-based
// removals even when no new log lines arrive.
setInterval(() => {
  const activeGames = stats.getActive()
  pubSub.publish('active', activeGames)
}, 30 * 1000)
server.start()
// FIX: start() is async and previously ran as a floating promise — a spawn or
// read failure surfaced only as an unhandled rejection. Report it explicitly.
tailInstance.start().catch((err) => console.error('logtail failed:', err))
// pnpm dev

116
src/stats.js Normal file
View File

@@ -0,0 +1,116 @@
'use strict'
const activeGames = [] // TODO: replace with Set
let totalReqs = 0
let totalHits = 0
let totalMisses = 0
// Arm a 30-second inactivity timer for `game`: when it fires, the matching
// entry (by id) is removed from the shared activeGames list. Returns the
// timer handle so callers can clearTimeout() on renewed activity.
function gameTimeout(game) {
  return setTimeout(() => {
    const staleIndex = activeGames.findIndex((entry) => entry.id === game.id)
    if (staleIndex !== -1)
      activeGames.splice(staleIndex, 1)
    // TODO: update pubSub
  }, 30 * 1000)
}
// Shape a (gameInfo, logItem) pair into the GQL GameInfo payload for the
// active list. Child depots are presented under their parent game, with the
// depot's own name carried in `content`.
// TODO: simplify this by gameInfo(gameInfo.parent)
function parseActiveItem(gameInfo, logItem) {
  const { service, client } = logItem
  // TODO: determine IP/DHCP hostname (for now, just export IP and then we can
  // do GQL resolveClient(ip) => dhcp), GQL should cache that
  const parent = gameInfo.parent
  if (parent && parent.name) {
    return {
      id: parent.id,
      service,
      client,
      name: parent.name,
      content: gameInfo.name,
    }
  }
  return {
    id: gameInfo.id,
    service,
    client,
    name: gameInfo.name,
    content: '', // temp fix for UI
  }
}
module.exports = {
addActive: function(gameInfo, logItem) {
const game = parseActiveItem(gameInfo, logItem)
// FIXME: This won't work for multiple clients, different DLC, etc.. Need to fix the filter...
const foundActiveGame = activeGames.find(gameItem => gameItem.id === game.id)
if (foundActiveGame) { // lastSeen
//console.log('clearTimeout:', game.name, 'timeout:', foundActiveGame.timeout)
clearTimeout(foundActiveGame.timeout)
foundActiveGame.timeout = gameTimeout(game)
return activeGames
}
game.timeout = gameTimeout(game)
activeGames.push(game)
return activeGames
},
getStats() {
return {
totalReqs,
totalHits,
totalMisses,
}
},
getActive() {
return activeGames
},
addRequest(gameInfo, logItem) {
totalReqs++
this.addHitMissRatio(gameInfo, logItem)
// TODO: publish to db for resume support.
return {
totalReqs,
totalHits,
totalMisses,
}
},
addHitMissRatio(gameInfo, logItem) {
// TODO: add miss/hit stat to individual item
if (logItem.cache === 'HIT')
totalHits++
else
totalMisses++
},
}
// lastAccess
// Stub: intended to walk parent links from a child depot up to its root game.
// NOTE(review): currently unimplemented and never called (declared after the
// module.exports object above).
function findGameRoot() {
  // If this is a child depot, traverse until we have parent.
}

103
tsconfig.json Normal file
View File

@@ -0,0 +1,103 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "commonjs", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
// "outDir": "./", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": false, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}