Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,17 +1,19 @@
import { fetchPinsGroupedByBranch } from "../../../pinStrategy";
import {
fetchPinsGroupedByBranch,
fetchPinsOlderThan
} from "../../../pinStrategy";
import { cliArgsToReleaseUploaderProvider } from "../../../releaseUploader";
import { Github } from "../../../providers/github/Github";
import { PinataPinManager } from "../../../providers/pinata/pinManager";
import { readManifest } from "../../../files";
import { Apm } from "../../../utils/Apm";

/**
* Removes all pins associated with a branch that no longer exists
* Remove pins matching any of the following conditions:
* - pins associated with a branch that no longer exists
* - pins older than a month
*/
export async function cleanPinsFromDeletedBranches({
dir
}: {
dir: string;
}): Promise<void> {
export async function cleanPins({ dir }: { dir: string }): Promise<void> {
// Read manifest from disk to get package name
const { manifest } = readManifest({ dir });

Expand All @@ -28,6 +30,7 @@ export async function cleanPinsFromDeletedBranches({
throw Error("Must use pinata for deletePins");
const pinata = new PinataPinManager(releaseUploaderProvider);

// CLEAN PINS FROM DELETED BRANCHES
const pinsGroupedByBranch = await fetchPinsGroupedByBranch(pinata, manifest);
for (const { branch, pins } of pinsGroupedByBranch) {
if (branches.find(b => b.name === branch)) continue;
Expand All @@ -41,4 +44,22 @@ export async function cleanPinsFromDeletedBranches({
});
}
}

// CLEAN OLD PINS (>30 days) NOT RELEASED
const pinsOlderThan = await fetchPinsOlderThan(pinata, manifest, 30);
const apm = new Apm("remote");
const packageProductionHashes = await apm.getIpfsHashesFromDnpName(
manifest.name
);
for (const pin of pinsOlderThan) {
// Do not unpin if it is a production IPFS hash
if (packageProductionHashes.find(ipfsHash => ipfsHash === pin.ipfsHash))
continue;

console.log(`Unpin ${pin.commit} ${pin.ipfsHash}`);
await pinata.unpin(pin.ipfsHash).catch(e => {
// Don't prevent unpinning other pins if one is faulty
console.error(`Error on unpin ${pin.ipfsHash}`, e);
});
}
}
4 changes: 2 additions & 2 deletions src/commands/githubActions/build/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { buildHandler } from "../../build";
import { Github } from "../../../providers/github/Github";
import { parseRef } from "../../../providers/github/utils";
import { getBuildBotComment, isTargetComment } from "./botComment";
import { cleanPinsFromDeletedBranches } from "./cleanPinsFromDeletedBranches";
import { cleanPins } from "./cleanPins";

// This action should be run on 'push' and 'pull_request' events
//
Expand Down Expand Up @@ -45,7 +45,7 @@ export async function gaBuildHandler({
// Doing it here prevents having to add two workflows per repo.
// Also, ensures that pins are deleted eventually, even if this fails sometimes
try {
await cleanPinsFromDeletedBranches({ dir });
await cleanPins({ dir });
} catch (e) {
console.error("Error on cleanPinsFromDeletedBranches", e);
}
Expand Down
29 changes: 29 additions & 0 deletions src/pinStrategy/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,3 +80,32 @@ export async function fetchPinsWithBranchToDelete(
const pins = await fetchPinsWithBranch(pinata, manifest, gitHead);
return pins.filter(pin => pin.commit && pin.commit !== gitHead.commit);
}

/**
 * Fetch pins whose `date_pinned` is strictly more than `days` days in the past,
 * assuming pins were uploaded with `DnpPinMetadata` metadata.
 * @param pinata Pinata pin manager used to query the pin list
 * @param manifest Package manifest; `manifest.name` filters pins by dnpName
 * @param days Age threshold in days; only pins older than this are returned
 * @returns Summary (commit, ipfsHash) of each pin older than `days`
 */
export async function fetchPinsOlderThan(
  pinata: PinataPinManager,
  manifest: Manifest,
  days: number
): Promise<PinDataSummary[]> {
  const msPerDay = 24 * 3600 * 1000;
  const dateNow = new Date();
  const pins = await pinata.pinList<DnpPinMetadata>({
    status: "pinned",
    keyvalues: {
      dnpName: { value: manifest.name, op: "eq" }
    }
  });
  return pins
    .filter(pin => {
      // Keep only pins strictly older than `days`. The signed difference is
      // used deliberately: a pin dated in the future has a negative age and
      // must never be classified as old (Math.abs would misclassify it).
      const datePinned = new Date(pin.date_pinned);
      return dateNow.getTime() - datePinned.getTime() > days * msPerDay;
    })
    .map(pin => ({
      commit: pin.metadata.keyvalues?.commit,
      ipfsHash: pin.ipfs_pin_hash
    }));
}
3 changes: 2 additions & 1 deletion src/providers/pinata/pinManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,15 @@ export class PinataPinManager {
}

/**
* GET https://api.pinata.cloud/data/pinList
* GET https://docs.pinata.cloud/pinata-api/data/query-files#querying-files
* This endpoint returns data on what content the sender has pinned to IPFS through Pinata.
* The purpose of this endpoint is to provide insight into what is being pinned, and how long it has been pinned.
* The results of this call can be filtered using multiple query parameters that will be discussed below.
*/
async pinList<PinKeyvalues>(filters?: {
name?: string;
status?: "all" | "pinned" | "unpinned";
pinStart?: string; // date format ISO_8601
keyvalues?: {
// Each query on custom values takes the form of an object with a "value" key, and an "op" key.
// The "value" is fairly straightforward. This is simply the value that you wish your query operation to be applied to
Expand Down
27 changes: 26 additions & 1 deletion src/utils/Apm.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { ethers } from "ethers";
import { BigNumber, ethers } from "ethers";
import { arrayToSemver } from "../utils/arrayToSemver";
import repoAbi from "../contracts/RepoAbi.json";
import registryAbi from "../contracts/ApmRegistryAbi.json";
Expand Down Expand Up @@ -57,6 +57,31 @@ export class Apm {
}
}

/**
 * Returns the IPFS hashes stored on-chain for every published version of a
 * package, resolved through its APM repo contract.
 * @param dnpName ENS name of the package, e.g. "dappmanager.dnp.dappnode.eth"
 * @returns One IPFS hash per version, ordered by version ID (oldest first)
 * @throws If the dnpName does not resolve or has no repo contract
 *
 * TODO(review): early releases stored only the *manifest* hash on-chain; the
 * image and avatar hashes referenced inside that manifest are not collected
 * here yet — confirm whether those must also be returned for unpin safety.
 */
async getIpfsHashesFromDnpName(dnpName: string): Promise<string[]> {
  const ensName = await this.resolve(dnpName);
  if (!ensName) throw Error(`Error: ${dnpName} not found`);
  const repository = await this.getRepoContract(ensName);
  if (!repository) throw Error(`Error: ${dnpName} has no repo`);

  const numberOfPackageVersions = await repository
    .getVersionsCount()
    .then((res: BigNumber) => res.toNumber());
  console.log(`Found ${numberOfPackageVersions} versions of ${dnpName}`);

  // Version IDs on the APM Repo contract are 1-indexed. Fetch all versions
  // in parallel; result order still follows version ID.
  const versionIds = Array.from(
    { length: numberOfPackageVersions },
    (_, i) => i + 1
  );
  return Promise.all(
    versionIds.map(async versionId => {
      const packageData = await repository.getByVersionId(versionId);
      // contentURI is stored as bytes of "/ipfs/<hash>"; strip the prefix
      return ethers.utils
        .toUtf8String(packageData.contentURI)
        .replace(/^\/ipfs\//, "");
    })
  );
}

/**
* Get the lastest version of an APM repo contract for an ENS domain.
*
Expand Down
18 changes: 18 additions & 0 deletions test/utils/Apm.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,4 +39,22 @@ describe("Apm constructor", function () {
`Resulting version is not a valid semver: ${semver}`
);
});

it("Should get all the IPFS hashes from a given dnpName", async () => {
  const dnpName = "web3signer-gnosis.dnp.dappnode.eth";
  // On-chain release hashes of the package above, oldest version first
  const expectedIpfsHashes = [
    "QmdSKdCBQ5Jy1GGDJkBMFi3LtkbCckoRwXoM67pNezdG1x",
    "QmTZDB4Mq1SpSk2iB211f8EHxgiP6tcTrtZAZayEzZMuev",
    "Qmabpx7SrMP9hie8Qh7fhpuEnji13V3fNmNBe1XrSVTfDp",
    "QmbDew4vuHa8cmJ2KH2bt9Pyd3uRq7j6Uezwb9rz5oozfU",
    "QmZAy91PeBJvg7ruv7tuEavvbGAEWDHxS7m4HeMxb7wBjQ",
    "QmVycHEPxcdVzw9sYmjJfq6Tj2hJpQwqYQWGg81baUJuD2",
    "QmbAtNRRRSojMRTWNoqH9qMWVWLRatpUbEyG9cNfS5W1Hw"
  ];

  const apm = new Apm("infura");
  const ipfsHashes = await apm.getIpfsHashesFromDnpName(dnpName);
  expect(ipfsHashes).to.deep.equal(expectedIpfsHashes);
});
});