diff --git a/src/commands/githubActions/build/cleanPinsFromDeletedBranches.ts b/src/commands/githubActions/build/cleanPins.ts
similarity index 57%
rename from src/commands/githubActions/build/cleanPinsFromDeletedBranches.ts
rename to src/commands/githubActions/build/cleanPins.ts
index 97fb3db1..ab96e1c4 100644
--- a/src/commands/githubActions/build/cleanPinsFromDeletedBranches.ts
+++ b/src/commands/githubActions/build/cleanPins.ts
@@ -1,17 +1,19 @@
-import { fetchPinsGroupedByBranch } from "../../../pinStrategy";
+import {
+  fetchPinsGroupedByBranch,
+  fetchPinsOlderThan
+} from "../../../pinStrategy";
 import { cliArgsToReleaseUploaderProvider } from "../../../releaseUploader";
 import { Github } from "../../../providers/github/Github";
 import { PinataPinManager } from "../../../providers/pinata/pinManager";
 import { readManifest } from "../../../files";
+import { Apm } from "../../../utils/Apm";
 
 /**
- * Removes all pins associated with a branch that no longer exists
+ * Remove pins in the following conditions:
+ * - pins associated with a branch that no longer exists
+ * - pins older than a month
  */
-export async function cleanPinsFromDeletedBranches({
-  dir
-}: {
-  dir: string;
-}): Promise<void> {
+export async function cleanPins({ dir }: { dir: string }): Promise<void> {
   // Read manifest from disk to get package name
   const { manifest } = readManifest({ dir });
 
@@ -28,6 +30,7 @@ export async function cleanPinsFromDeletedBranches({
     throw Error("Must use pinata for deletePins");
   const pinata = new PinataPinManager(releaseUploaderProvider);
 
+  // CLEAN PINS FROM DELETED BRANCHES
   const pinsGroupedByBranch = await fetchPinsGroupedByBranch(pinata, manifest);
   for (const { branch, pins } of pinsGroupedByBranch) {
     if (branches.find(b => b.name === branch)) continue;
@@ -41,4 +44,22 @@ export async function cleanPinsFromDeletedBranches({
       });
     }
   }
+
+  // CLEAN OLD PINS (>30 days) NOT RELEASED
+  const pinsOlderThan = await fetchPinsOlderThan(pinata, manifest, 30);
+  const apm = new Apm("remote");
+  const packageProductionHashes = await apm.getIpfsHashesFromDnpName(
+    manifest.name
+  );
+  for (const pin of pinsOlderThan) {
+    // Do not unpin if it is a production IPFS hash
+    if (packageProductionHashes.find(ipfsHash => ipfsHash === pin.ipfsHash))
+      continue;
+
+    console.log(`Unpin ${pin.commit} ${pin.ipfsHash}`);
+    await pinata.unpin(pin.ipfsHash).catch(e => {
+      // Don't prevent unpinning other pins if one is faulty
+      console.error(`Error on unpin ${pin.ipfsHash}`, e);
+    });
+  }
 }
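For reviewers, the new cleanup path in `cleanPins` boils down to: fetch the pins older than 30 days, fetch the IPFS hashes of published releases, and unpin only the hashes that are not releases. The sketch below is illustrative only and not part of the diff; the helper name `listStalePins` is hypothetical, and the import paths assume it sits next to `cleanPins.ts`.

```ts
import { fetchPinsOlderThan } from "../../../pinStrategy";
import { PinataPinManager } from "../../../providers/pinata/pinManager";
import { readManifest } from "../../../files";
import { Apm } from "../../../utils/Apm";

/**
 * Hypothetical dry-run helper: returns the IPFS hashes that cleanPins
 * would unpin, without actually deleting anything from Pinata.
 */
export async function listStalePins({
  dir,
  pinata
}: {
  dir: string;
  pinata: PinataPinManager;
}): Promise<string[]> {
  const { manifest } = readManifest({ dir });

  // Same inputs as the new block in cleanPins: pins older than 30 days,
  // plus the production release hashes that must never be unpinned
  const oldPins = await fetchPinsOlderThan(pinata, manifest, 30);
  const productionHashes = await new Apm("remote").getIpfsHashesFromDnpName(
    manifest.name
  );

  return oldPins
    .filter(pin => !productionHashes.includes(pin.ipfsHash))
    .map(pin => pin.ipfsHash);
}
```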
diff --git a/src/commands/githubActions/build/index.ts b/src/commands/githubActions/build/index.ts
index 1a967de9..c99555bc 100644
--- a/src/commands/githubActions/build/index.ts
+++ b/src/commands/githubActions/build/index.ts
@@ -6,7 +6,7 @@ import { buildHandler } from "../../build";
 import { Github } from "../../../providers/github/Github";
 import { parseRef } from "../../../providers/github/utils";
 import { getBuildBotComment, isTargetComment } from "./botComment";
-import { cleanPinsFromDeletedBranches } from "./cleanPinsFromDeletedBranches";
+import { cleanPins } from "./cleanPins";
 
 // This action should be run on 'push' and 'pull_request' events
 //
@@ -45,7 +45,7 @@ export async function gaBuildHandler({
   // Doing it here prevents having to add two workflows per repo.
   // Also, ensures that pins are deleted eventually, even if this fails sometimes
   try {
-    await cleanPinsFromDeletedBranches({ dir });
+    await cleanPins({ dir });
   } catch (e) {
     console.error("Error on cleanPinsFromDeletedBranches", e);
   }
diff --git a/src/pinStrategy/index.ts b/src/pinStrategy/index.ts
index 3e7743cd..3e93b0cf 100644
--- a/src/pinStrategy/index.ts
+++ b/src/pinStrategy/index.ts
@@ -80,3 +80,32 @@ export async function fetchPinsWithBranchToDelete(
   const pins = await fetchPinsWithBranch(pinata, manifest, gitHead);
   return pins.filter(pin => pin.commit && pin.commit !== gitHead.commit);
 }
+
+/**
+ * Fetch pins older than the given number of days, assuming pins are uploaded with `DnpPinMetadata` metadata.
+ */
+export async function fetchPinsOlderThan(
+  pinata: PinataPinManager,
+  manifest: Manifest,
+  days: number
+): Promise<PinDataSummary[]> {
+  const dateNow = new Date();
+  const pins = await pinata.pinList({
+    status: "pinned",
+    keyvalues: {
+      dnpName: { value: manifest.name, op: "eq" }
+    }
+  });
+  return pins
+    .filter(pin => {
+      // return pins older than the days provided
+      const datePinned = new Date(pin.date_pinned);
+      const dateDiff = Math.abs(dateNow.getTime() - datePinned.getTime());
+      const diffDays = Math.ceil(dateDiff / (1000 * 3600 * 24));
+      return diffDays > days;
+    })
+    .map(pin => ({
+      commit: pin.metadata.keyvalues?.commit,
+      ipfsHash: pin.ipfs_pin_hash
+    }));
+}
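Note that `fetchPinsOlderThan` filters by age on the client, from Pinata's `date_pinned` field, rather than through a server-side date parameter. A minimal sketch of that arithmetic; the `isOlderThan` helper is hypothetical and only mirrors the filter above, assuming `date_pinned` is an ISO 8601 string as Pinata returns it:

```ts
// Hypothetical helper mirroring the age filter in fetchPinsOlderThan.
function isOlderThan(datePinned: string, days: number, now = new Date()): boolean {
  const dateDiffMs = Math.abs(now.getTime() - new Date(datePinned).getTime());
  const diffDays = Math.ceil(dateDiffMs / (1000 * 3600 * 24));
  return diffDays > days;
}

const now = new Date("2023-03-01T00:00:00Z");
console.log(isOlderThan("2023-01-15T12:00:00Z", 30, now)); // true  (~45 days old)
console.log(isOlderThan("2023-02-28T00:00:00Z", 30, now)); // false (1 day old)
```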
diff --git a/src/providers/pinata/pinManager.ts b/src/providers/pinata/pinManager.ts
index b3db5754..410e2099 100644
--- a/src/providers/pinata/pinManager.ts
+++ b/src/providers/pinata/pinManager.ts
@@ -21,7 +21,7 @@ export class PinataPinManager {
   }
 
   /**
-   * GET https://api.pinata.cloud/data/pinList
+   * GET https://docs.pinata.cloud/pinata-api/data/query-files#querying-files
    * This endpoint returns data on what content the sender has pinned to IPFS through Pinata.
    * The purpose of this endpoint is to provide insight into what is being pinned, and how long it has been pinned.
    * The results of this call can be filtered using multiple query parameters that will be discussed below.
@@ -29,6 +29,7 @@ export class PinataPinManager {
   async pinList(filters?: {
     name?: string;
     status?: "all" | "pinned" | "unpinned";
+    pinStart?: string; // date format ISO_8601
     keyvalues?: {
       // Each query on custom values takes the form of an object with a "value" key, and an "op" key.
       // The "value" is fairly straightforward. This is simply the value that you wish your query operation to be applied to
diff --git a/src/utils/Apm.ts b/src/utils/Apm.ts
index e80582fa..5ea75ea7 100644
--- a/src/utils/Apm.ts
+++ b/src/utils/Apm.ts
@@ -1,4 +1,4 @@
-import { ethers } from "ethers";
+import { BigNumber, ethers } from "ethers";
 import { arrayToSemver } from "../utils/arrayToSemver";
 import repoAbi from "../contracts/RepoAbi.json";
 import registryAbi from "../contracts/ApmRegistryAbi.json";
@@ -57,6 +57,31 @@ export class Apm {
     }
   }
 
+  /**
+   * Returns all the IPFS hashes from a given package dnpName
+   */
+  async getIpfsHashesFromDnpName(dnpName: string): Promise<string[]> {
+    const ensName = await this.resolve(dnpName);
+    if (!ensName) throw Error(`Error: ${dnpName} not found`);
+    const repository = await this.getRepoContract(ensName);
+    if (!repository) throw Error(`Error: ${dnpName} has no repo`);
+
+    const numberOfPackageVersions = await repository
+      .getVersionsCount()
+      .then((res: BigNumber) => res.toNumber());
+    console.log(`Found ${numberOfPackageVersions} versions of ${dnpName}`);
+
+    const packageIpfsHashes: string[] = [];
+    for (let i = 1; i <= numberOfPackageVersions; i++) {
+      const packageData = await repository.getByVersionId(i);
+      const packageIpfsHash = ethers.utils
+        .toUtf8String(packageData.contentURI)
+        .replace(/^\/ipfs\//, "");
+      packageIpfsHashes.push(packageIpfsHash);
+    }
+    return packageIpfsHashes;
+  }
+
   /**
    * Get the lastest version of an APM repo contract for an ENS domain.
    *
diff --git a/test/utils/Apm.test.ts b/test/utils/Apm.test.ts
index 0b47df00..57ceb905 100644
--- a/test/utils/Apm.test.ts
+++ b/test/utils/Apm.test.ts
@@ -39,4 +39,22 @@ describe("Apm constructor", function () {
       `Resulting version is not a valid semver: ${semver}`
     );
   });
+
+  it("Should get all the IPFS hashes from a given dnpName", async () => {
+    const expectedIpfsHashes = [
+      "QmdSKdCBQ5Jy1GGDJkBMFi3LtkbCckoRwXoM67pNezdG1x",
+      "QmTZDB4Mq1SpSk2iB211f8EHxgiP6tcTrtZAZayEzZMuev",
+      "Qmabpx7SrMP9hie8Qh7fhpuEnji13V3fNmNBe1XrSVTfDp",
+      "QmbDew4vuHa8cmJ2KH2bt9Pyd3uRq7j6Uezwb9rz5oozfU",
+      "QmZAy91PeBJvg7ruv7tuEavvbGAEWDHxS7m4HeMxb7wBjQ",
+      "QmVycHEPxcdVzw9sYmjJfq6Tj2hJpQwqYQWGg81baUJuD2",
+      "QmbAtNRRRSojMRTWNoqH9qMWVWLRatpUbEyG9cNfS5W1Hw"
+    ];
+    const apm = new Apm("infura");
+    const ipfsHashes = await apm.getIpfsHashesFromDnpName(
+      "web3signer-gnosis.dnp.dappnode.eth"
+    );
+
+    expect(ipfsHashes).to.deep.equal(expectedIpfsHashes);
+  });
 });
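The new test depends on `getIpfsHashesFromDnpName` decoding each version's `contentURI` bytes into a bare IPFS hash. A minimal, self-contained sketch of that decoding step; the `contentURI` value is constructed locally for illustration (in the real method it comes from `repository.getByVersionId`), assuming the repo stores it as the UTF-8 bytes of an `/ipfs/<hash>` path, which is what the decoding in the diff implies:

```ts
import { ethers } from "ethers";

// Build an example contentURI: UTF-8 bytes of "/ipfs/<hash>", hex-encoded.
// The hash is one of the expected values from the test above.
const contentURI = ethers.utils.hexlify(
  ethers.utils.toUtf8Bytes("/ipfs/QmdSKdCBQ5Jy1GGDJkBMFi3LtkbCckoRwXoM67pNezdG1x")
);

// Same decoding as getIpfsHashesFromDnpName: bytes -> string, strip "/ipfs/".
const ipfsHash = ethers.utils
  .toUtf8String(contentURI)
  .replace(/^\/ipfs\//, "");

console.log(ipfsHash); // QmdSKdCBQ5Jy1GGDJkBMFi3LtkbCckoRwXoM67pNezdG1x
```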