diff --git a/README.md b/README.md
index c4c9c7c..863292d 100644
--- a/README.md
+++ b/README.md
@@ -64,7 +64,6 @@ This will allow using the production API without CORS issues.
 - AUTH0_CLIENT_SECRET: Auth0 client secret
 - AUTH0_DOMAIN: Auth0 domain
 - CAUR_DEPLOY_LOG_ID: Telegram chat id for deploy logs
-- CAUR_GITLAB_TOKEN: Gitlab token for pushing to the repository
 - CAUR_JWT_SECRET: JWT secret for the backend
 - CAUR_NEWS_ID: Telegram chat id for news
 - CAUR_TRUST_PROXY: IP address of the proxy, if any
diff --git a/backend/src/config/repo-manager.config.ts b/backend/src/config/repo-manager.config.ts
index 69e6e1f..8b10224 100644
--- a/backend/src/config/repo-manager.config.ts
+++ b/backend/src/config/repo-manager.config.ts
@@ -4,7 +4,6 @@ export default registerAs("repoMan", () => ({
     gitAuthor: process.env.GIT_AUTHOR ?? "Temeraire",
     gitEmail: process.env.GIT_EMAIL ?? "ci@chaotic.cx",
     gitUsername: process.env.GIT_USERNAME ?? "git",
-    gitlabToken: process.env.CAUR_GITLAB_TOKEN,
     globalBlacklist: process.env.REPOMANAGER_NEVER_REBUILD ?? "[]",
     globalTriggers: process.env.REPOMANAGER_ALWAYS_REBUILD ?? "[]",
     regenDatabase: process.env.REPOMANAGER_REGEN_DB ?? false,
diff --git a/backend/src/interfaces/repo-manager.ts b/backend/src/interfaces/repo-manager.ts
index ec1ba58..39c470f 100644
--- a/backend/src/interfaces/repo-manager.ts
+++ b/backend/src/interfaces/repo-manager.ts
@@ -1,5 +1,5 @@
-import { Package } from "../builder/builder.entity";
-import { ArchlinuxPackage } from "../repo-manager/repo-manager.entity";
+import type { Package } from "../builder/builder.entity";
+import type { ArchlinuxPackage } from "../repo-manager/repo-manager.entity";
 
 export interface Repo {
     name: string;
@@ -48,7 +48,6 @@ export interface RepoSettings {
     gitAuthor: string;
     gitEmail: string;
     gitUsername: string;
-    gitlabToken: string;
     globalBlacklist: string[];
     globalTriggers: string[];
     regenDatabase: boolean;
diff --git a/backend/src/repo-manager/repo-manager.service.ts b/backend/src/repo-manager/repo-manager.service.ts
index 682c801..7994105 100644
--- a/backend/src/repo-manager/repo-manager.service.ts
+++ b/backend/src/repo-manager/repo-manager.service.ts
@@ -33,12 +33,12 @@ import { IsNull, MoreThanOrEqual, Not, Repository } from "typeorm";
 import { ARCH } from "../constants";
 import { Build, Package, pkgnameExists, Repo } from "../builder/builder.entity";
 import { ConfigService } from "@nestjs/config";
-import { AxiosResponse } from "axios";
+import type { AxiosResponse } from "axios";
 import { bumpTypeToText, isValidUrl } from "../functions";
 import { CronJob } from "cron";
 import util from "node:util";
 import { exec } from "node:child_process";
-import { AES } from "crypto-js";
+import { AES, enc } from "crypto-js";
 
 @Injectable()
 export class RepoManagerService {
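Aside (illustration only, not part of the patch): the repo-manager.service.ts hunks below store each repository's API token AES-encrypted via crypto-js and decrypt it again when authenticating git operations. A minimal sketch of that round-trip, with a placeholder passphrase standing in for the configured app.dbKey and a made-up token value:

import { AES, enc } from "crypto-js";

// Placeholder values; in the service, the passphrase comes from app.dbKey and the token from the database.
const dbKey = "example-passphrase";
// Encrypt the way "CLEAR:"-prefixed tokens are handled below: wrap the token in JSON first.
const cipherText = AES.encrypt(JSON.stringify({ token: "example-token" }), dbKey).toString();
// Decryption must request UTF-8 output; crypto-js otherwise stringifies the result as hex.
const decrypted: { token: string } = JSON.parse(AES.decrypt(cipherText, dbKey).toString(enc.Utf8));
console.log(decrypted.token); // "example-token"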
@@ -87,33 +87,37 @@
 
         // We explicitly want to encrypt API tokens if they are prefixed with "CLEAR:"
         try {
-            const reposWithTokens = await this.repoRepository.find({where: {apiToken: Not(IsNull())}})
-            const dbKey = this.configService.getOrThrow("app.dbKey")
+            const reposWithTokens = await this.repoRepository.find({ where: { apiToken: Not(IsNull()) } });
+            const dbKey = this.configService.getOrThrow("app.dbKey");
 
             for (const repo of reposWithTokens) {
                 if (repo.apiToken.startsWith("CLEAR:")) {
-                    repo.apiToken = AES.encrypt(repo.apiToken.split(":")[1], dbKey).toString()
+                    const token = repo.apiToken.split(":")[1]
+                    repo.apiToken = AES.encrypt(JSON.stringify({ token }), dbKey).toString();
+                    await this.repoRepository.save(repo)
                 }
-                Logger.log(`Encrypted token for repo ${repo.name}`)
+                Logger.log(`Encrypted token for repo ${repo.name}`);
             }
-        } catch(err: unknown) {
-            Logger.error(err, "RepoManager")
+        } catch (err: unknown) {
+            Logger.error(err, "RepoManager");
         }
 
         try {
             if (globalTriggers && globalTriggers.length > 0) {
                 if (existingSettings?.value) {
-                    const existing: string[] = JSON.parse(existingSettings.value);
-
-                    for (const key of existing) {
-                        if (!globalTriggers.includes(key)) {
-                            globalTriggers.push(key);
+                    try {
+                        Logger.log(existingSettings);
+                        const existing: string[] = JSON.parse(existingSettings.value);
+                        for (const key of existing) {
+                            if (!globalTriggers.includes(key)) {
+                                globalTriggers.push(key);
+                            }
                         }
-                    }
-                    await this.settingsRepository.update(
-                        { key: "alwaysRebuild" },
-                        { value: JSON.stringify(globalTriggers) },
-                    );
+                        await this.settingsRepository.update(
+                            { key: "alwaysRebuild" },
+                            { value: JSON.stringify(globalTriggers) },
+                        );
+                    } catch (err: unknown) {}
                 } else {
                     await this.settingsRepository.save({
                         key: "globalTriggers",
@@ -122,7 +126,9 @@
                 }
             } else {
                 if (existingSettings) {
-                    globalTriggers.push(...JSON.parse(existingSettings.value));
+                    try {
+                        globalTriggers.push(...JSON.parse(existingSettings.value));
+                    } catch (err: unknown) {}
                 }
             }
         } catch (err: unknown) {
@@ -179,7 +185,6 @@
             gitAuthor: this.configService.getOrThrow("repoMan.gitAuthor"),
             gitEmail: this.configService.getOrThrow("repoMan.gitEmail"),
             gitUsername: this.configService.getOrThrow("repoMan.gitUsername"),
-            gitlabToken: this.configService.getOrThrow("repoMan.gitlabToken"),
             globalTriggers:
                 globalTriggers ?? JSON.parse(this.configService.getOrThrow("repoMan.globalTriggers")),
             globalBlacklist:
@@ -496,7 +501,7 @@
         settings: Repository;
     };
     private readonly httpService: HttpService;
-    private readonly configService: ConfigService
+    private readonly configService: ConfigService;
     private repoDirs: string[] = [];
     private repoManagerSettings: RepoSettings;
 
@@ -511,7 +516,7 @@
             settings: Repository;
         },
         settings: RepoSettings,
-        configService: ConfigService
+        configService: ConfigService,
     ) {
         this.httpService = httpService;
         this.dbConnections = dbConnections;
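Aside (illustration only, not part of the patch): the hunks further down feed the decrypted token into isomorphic-git's onAuth callback for git.push and git.pull. A hypothetical standalone helper showing that authentication shape; the function name and parameters are assumptions, only the onAuth pattern mirrors the patch:

import * as git from "isomorphic-git";
import http from "isomorphic-git/http/node";
import * as fs from "node:fs";

// Hypothetical helper, not taken from the codebase: push an already-cloned repository
// directory using a username plus token via isomorphic-git's onAuth callback.
async function pushWithToken(repoDir: string, username: string, token: string): Promise<void> {
    await git.push({
        fs,
        http,
        dir: repoDir,
        onAuth: () => ({ username, password: token }),
    });
}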
@@ -639,39 +644,41 @@
                 }
             }
 
-            if (!foundTrigger && soProvidingArchPackages.length > 0 && metadata?.deps) {
-                const trigger = soProvidingArchPackages.find((soProviding) => {
-                    const hasSoDep = metadata?.deps?.some((dep) => {
-                        const pkgNoSo = dep.split(".so")[0];
-
-                        // TODO: probably too sensitive and causing too many builds
-                        return soProviding.provides.some(
-                            (pkg) => pkg.includes(pkgNoSo) || soProviding.pkg.pkgname.includes(pkgNoSo),
-                        );
-                    });
-
-                    if (hasSoDep) {
-                        Logger.debug(`Found shared library trigger ${soProviding.pkg.pkgname} by name`, "RepoManager");
-                        return true;
-                    }
-                    return false;
-                });
-                if (trigger) {
-                    needsRebuild.push({
-                        archPkg: trigger.pkg,
-                        configs: pkgConfig.configs,
-                        pkg: pkgConfig.pkgInDb,
-                        bumpType: BumpType.FROM_DEPS,
-                        triggerFrom: TriggerType.ARCH,
-                    });
-
-                    Logger.debug(
-                        `Rebuilding ${pkgbaseDir} because of changed shared library ${trigger.pkg.pkgname}`,
-                        "RepoManager",
-                    );
-                    foundTrigger = true;
-                }
-            }
+            // if (!foundTrigger && soProvidingArchPackages.length > 0 && metadata?.deps) {
+            //     const trigger = soProvidingArchPackages.find((soProviding) => {
+            //         const hasSoDep = metadata?.deps?.some((dep) => {
+            //             const pkgNoSo = dep.split(".so")[0];
+            //
+            //             // TODO: probably too sensitive and causing too many builds
+            //             return soProviding.provides.some(
+            //                 (pkg) => pkg.includes(pkgNoSo) || soProviding.pkg.pkgname.includes(pkgNoSo),
+            //             );
+            //         });
+            //
+            //         if (hasSoDep) {
+            //             Logger.debug(`Found shared library trigger ${soProviding.pkg.pkgname} by name`, "RepoManager");
+            //             return true;
+            //         }
+            //         return false;
+            //     });
+            //     if (trigger) {
+            //         needsRebuild.push({
+            //             archPkg: trigger.pkg,
+            //             configs: pkgConfig.configs,
+            //             pkg: pkgConfig.pkgInDb,
+            //             bumpType: BumpType.FROM_DEPS,
+            //             triggerFrom: TriggerType.ARCH,
+            //         });
+            //
+            //         Logger.debug(
+            //             `Rebuilding ${pkgbaseDir} because of changed shared library ${trigger.pkg.pkgname}`,
+            //             "RepoManager",
+            //         );
+            //         foundTrigger = true;
+            //     }
+            // }
+
+            Logger.debug(pkgConfig.pkgInDb.namcapAnalysis);
 
             if (!foundTrigger && pkgConfig.pkgInDb.namcapAnalysis) {
                 const namcapAnalysis: Partial = pkgConfig.pkgInDb.namcapAnalysis;
@@ -683,10 +690,14 @@
                     "link-level-dependence",
                 ];
 
+                Logger.debug(`searching shared lib for ${pkgConfig.pkgInDb.pkgname}`);
+                Logger.debug(namcapAnalysis);
+
                 for (const key of relevantKeys) {
                     let trigger: ArchlinuxPackage;
                     if (namcapAnalysis[key]) {
                         for (const depPkg of namcapAnalysis[key]) {
+                            Logger.debug(`${depPkg}`);
                             const foundSoProvider: {
                                 pkg: ArchlinuxPackage;
                                 provides: string[];
@@ -1225,14 +1236,14 @@
         }
 
         try {
-            const token = AES.decrypt(repo.apiToken, this.configService.getOrThrow("app.dbKey")).toString()
+            const token = JSON.parse(AES.decrypt(repo.apiToken, this.configService.getOrThrow("app.dbKey")).toString(enc.Utf8));
             await git.push({
                 fs,
                 http,
                 dir: repoDir,
                 onAuth: () => ({
                     username: this.repoManagerSettings.gitUsername,
-                    password: token,
+                    password: token.token,
                 }),
             });
 
@@ -1460,7 +1471,10 @@
            if (!repoDir) {
                repoDir = await this.createRepoDir(build.repo);
            } else {
-                const token = AES.decrypt(build.repo.apiToken, this.configService.getOrThrow("app.dbKey")).toString()
+                const token = JSON.parse(AES.decrypt(
+                    build.repo.apiToken,
+                    this.configService.getOrThrow("app.dbKey"),
+                ).toString(enc.Utf8));
                await git.pull({
                    fs,
                    http,
@@ -1468,7 +1482,7 @@
                    author: { name: this.repoManagerSettings.gitAuthor, email: this.repoManagerSettings.gitEmail },
                    onAuth: () => ({
                        username: this.repoManagerSettings.gitUsername,
-                        password: token,
+                        password: token.token,
                    }),
                });
            }
@@ -1490,9 +1504,12 @@
                    pkg.pkgname,
                );
 
+                Logger.log(pkg.metadata, "CheckDeps")
+                Logger.log(soNameList, "CheckDeps")
+
                if (
-                    (pkg.metadata.deps && pkg.metadata.deps.includes(build.pkgbase.pkgname)) ||
-                    soNameList.find((soName) => pkg.metadata.deps.includes(soName))
+                    (pkg.metadata?.deps?.includes(build.pkgbase.pkgname)) ||
+                    soNameList.find((soName) => pkg.metadata?.deps?.includes(soName))
                ) {
                    needsRebuild.push({
                        configs: configs.configs,