Export des transactions en .csv

Bonjour

sur la v1 de Cesium, il y avait une fonction bien utile (pour les trésoriers d’asso, par exemple :wink: ) qui permettait d’exporter au format .csv l’historique des transactions d’un portefeuille.

Peut-on espérer le retour d’une telle possibilité sur Cesium² ?

D’avance merci :folded_hands:

3 Likes

Bonjour, une réponse, peut-être ? … :folded_hands:

Oui, vu le nombre de bugs bloquants sur cesium, cette fonctionnalité arrivera plus tard. En attendant, voici un petit script rédigé par une IA qui permet ça :

#!/usr/bin/env python3
"""
Export transaction history for a Duniter account from a Squid GraphQL endpoint.

Usage:
    python export_transfers.py <address> [--endpoint URL] [--output FILE]
"""

import argparse
import csv
import sys
import json

# Plain imports: urllib is part of the standard library, so wrapping it in
# try/except ImportError with a bare `pass` only hid a hypothetical failure —
# the script would then crash later with a confusing NameError instead.
import urllib.error
import urllib.request

# Default GraphQL endpoint (local indexer); override with --endpoint.
DEFAULT_ENDPOINT = "http://localhost:4350/graphql"

# Cursor-paginated query: selects transfers where the account is either the
# sender (fromId) or the recipient (toId), ordered by block number ascending.
# $after is the pagination cursor returned in pageInfo.endCursor.
QUERY = """
query GetTransfers($accountId: String!, $first: Int!, $after: Cursor) {
  transfers(
    filter: {
      or: [
        { fromId: { equalTo: $accountId } }
        { toId: { equalTo: $accountId } }
      ]
    }
    orderBy: [BLOCK_NUMBER_ASC]
    first: $first
    after: $after
  ) {
    edges {
      cursor
      node {
        id
        blockNumber
        timestamp
        from { id }
        to { id }
        amount
        comment { remark }
      }
    }
    pageInfo {
      hasNextPage
      endCursor
    }
  }
}
"""

# Number of transfer edges requested per page.
PAGE_SIZE = 1000


def graphql_request(endpoint: str, query: str, variables: dict) -> dict:
    """POST a GraphQL query to *endpoint* and return the ``data`` payload.

    On transport failures or GraphQL-level errors, print a diagnostic to
    stderr and exit the process with status 1.
    """
    encoded = json.dumps({"query": query, "variables": variables}).encode("utf-8")
    headers = {"Content-Type": "application/json", "Accept": "application/json"}
    request = urllib.request.Request(endpoint, data=encoded, headers=headers)

    try:
        with urllib.request.urlopen(request) as response:
            reply = json.loads(response.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        body = e.read().decode("utf-8")
        print(f"HTTP error {e.code}: {body}", file=sys.stderr)
        sys.exit(1)
    except urllib.error.URLError as e:
        print(f"Connection error: {e.reason}", file=sys.stderr)
        sys.exit(1)

    # A 200 response may still carry GraphQL errors; treat them as fatal.
    if "errors" in reply:
        print("GraphQL errors:", file=sys.stderr)
        for err in reply["errors"]:
            print(f"  {err.get('message', err)}", file=sys.stderr)
        sys.exit(1)

    return reply["data"]


def fetch_all_transfers(endpoint: str, account_id: str) -> list:
    """Collect every transfer involving *account_id* via cursor pagination.

    Follows ``pageInfo.endCursor`` until ``hasNextPage`` is false and
    returns the flat list of transfer nodes. Progress is printed to stderr.
    """
    nodes: list = []
    cursor = None

    while True:
        count = len(nodes)
        print(f"  Fetching transfers {count}–{count + PAGE_SIZE - 1}...", file=sys.stderr, end="\r")
        variables = {"accountId": account_id, "first": PAGE_SIZE}
        if cursor:
            variables["after"] = cursor
        connection = graphql_request(endpoint, QUERY, variables).get("transfers", {})
        nodes.extend(edge["node"] for edge in connection.get("edges", []))
        info = connection.get("pageInfo", {})
        if not info.get("hasNextPage"):
            break
        cursor = info["endCursor"]

    print(f"  Fetched {len(nodes)} transfers.          ", file=sys.stderr)
    return nodes


def write_csv(transfers: list, account_id: str, output) -> None:
    """Serialize *transfers* to CSV on *output*, oriented around *account_id*.

    Each row records the direction ("out" when the account is the sender,
    "in" otherwise), the counterpart address, and the raw amount.
    """
    writer = csv.writer(output)
    writer.writerow(["id", "block_number", "timestamp", "direction", "counterpart", "amount_ud", "comment"])

    for transfer in transfers:
        outgoing = transfer["from"]["id"] == account_id
        counterpart = transfer["to"]["id"] if outgoing else transfer["from"]["id"]
        remark = transfer["comment"]["remark"] if transfer.get("comment") else ""
        # amount is in UD cents (integer), display as-is
        row = [
            transfer["id"],
            transfer["blockNumber"],
            transfer["timestamp"],
            "out" if outgoing else "in",
            counterpart,
            transfer["amount"],
            remark,
        ]
        writer.writerow(row)


def main():
    """CLI entry point: parse arguments, fetch transfers, emit CSV."""
    parser = argparse.ArgumentParser(description="Export Duniter account transfer history to CSV.")
    parser.add_argument("address", help="SS58 account address")
    parser.add_argument("--endpoint", default=DEFAULT_ENDPOINT, help=f"GraphQL endpoint (default: {DEFAULT_ENDPOINT})")
    parser.add_argument("--output", "-o", default="-", help="Output CSV file (default: stdout)")
    opts = parser.parse_args()

    print(f"Exporting transfers for {opts.address} from {opts.endpoint}", file=sys.stderr)
    transfers = fetch_all_transfers(opts.endpoint, opts.address)

    if not transfers:
        print("No transfers found.", file=sys.stderr)

    if opts.output != "-":
        # newline="" prevents the csv module from doubling line endings.
        with open(opts.output, "w", newline="", encoding="utf-8") as handle:
            write_csv(transfers, opts.address, handle)
        print(f"Saved to {opts.output}", file=sys.stderr)
    else:
        write_csv(transfers, opts.address, sys.stdout)


if __name__ == "__main__":
    main()

Il s’appelle comme ça :

python export_transfers.py g1Kah4oS1PLp8N6QwbiVXpqfgqomoF1offxbtvx2aeMZbEvHe --endpoint https://squid.g1.coinduf.eu/v1/graphql --output transfers.csv

Ça devrait dépanner en attendant qu’on ajoute ça dans Cesium².

2 Likes

As-tu essayé Tikka ?

Arf @HugoTrentesaux à mon avis tu aurais aussi vite fait de coder une MR dans Cesium2, non ? :slight_smile:
C’est très simple grâce à ta query GraphQL

3 Likes

Merci à vous.

Je vais voir ce que je peux faire avec ton script…

Quant à Tikka, j’ai déjà répondu que j’attends de pouvoir faire une installation automatique (avec un .deb, par exemple).

Sinon, ben… j’attendrai, il n’y a pas urgence de toute façon.

Oui, tu as raison, il faut que je contribue aussi à Cesium. Mais je n’arrive pas à me trouver un moment calme pour faire ça :confused: