refactor: use SkillClient, remove all auth concerns from business logic

Replaced manual config/token/retry handling with createSkillClient().
scrape.ts is now pure data transformation with zero auth imports.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
ywkj 2026-03-17 08:17:53 +08:00
parent f3483033db
commit 6cf3753e12
3 changed files with 34 additions and 96 deletions

View File

@@ -1,98 +1,61 @@
import type { Command, OutputResult, ScrapePayload } from './types.js';
import { createEnvConfig, getAccessToken, fetchSessionJson } from '@clawd/auth-runtime';
import { buildPayloadFromUrl, validatePayloadJson, scrapeProduct } from './scrape.js';
import { createSkillClient } from '@clawd/auth-runtime';
import { buildPayloadFromUrl, validatePayloadJson } from './scrape.js';
export async function run1688(
command: Command,
args: string[],
dryRun: boolean = false,
): Promise<OutputResult> {
const config = createEnvConfig();
const ecomBase = (process.env.ECOM_BASE || config.authBase).replace(/\/$/, '');
if (!config.clientKey) {
return failed(command, dryRun, 'missing required env: CLIENT_KEY');
let client;
try {
client = createSkillClient({ dryRun });
} catch (error) {
return failed(command, dryRun, error instanceof Error ? error.message : String(error));
}
switch (command) {
case 'session':
return runSession(command, dryRun, config);
case 'scrape-url':
return runScrapeUrl(command, dryRun, config, ecomBase, args);
case 'scrape-payload':
return runScrapePayload(command, dryRun, config, ecomBase, args);
case 'session': {
try {
const session = await client.session();
return { status: 'success', error: null, command, dryRun, session };
} catch (error) {
return failed(command, dryRun, error instanceof Error ? error.message : String(error));
}
}
case 'scrape-url': {
const url = args[0];
if (!url) return failed(command, dryRun, 'scrape-url requires <1688-url>');
const defaults = readDefaults();
const payload = buildPayloadFromUrl(url, args[1] || '', defaults);
return runScrape(client, command, dryRun, payload);
}
case 'scrape-payload': {
const rawPayload = args[0];
if (!rawPayload) return failed(command, dryRun, 'scrape-payload requires <payload-json>');
try {
const payload = validatePayloadJson(rawPayload);
return runScrape(client, command, dryRun, payload);
} catch (error) {
return failed(command, dryRun, error instanceof Error ? error.message : String(error));
}
}
default:
return failed(command, dryRun, `unknown command: ${command}`);
}
}
async function runSession(
command: string,
dryRun: boolean,
config: ReturnType<typeof createEnvConfig>,
): Promise<OutputResult> {
const session = await fetchSessionJson(dryRun, config);
return { status: 'success', error: null, command, dryRun, session };
}
async function runScrapeUrl(
command: string,
dryRun: boolean,
config: ReturnType<typeof createEnvConfig>,
ecomBase: string,
args: string[],
): Promise<OutputResult> {
const url = args[0];
if (!url) {
return failed(command, dryRun, 'scrape-url requires <1688-url>');
}
const defaults = readDefaults();
const payload = buildPayloadFromUrl(url, args[1] || '', defaults);
return runScrape(command, dryRun, config, ecomBase, payload);
}
async function runScrapePayload(
command: string,
dryRun: boolean,
config: ReturnType<typeof createEnvConfig>,
ecomBase: string,
args: string[],
): Promise<OutputResult> {
const rawPayload = args[0];
if (!rawPayload) {
return failed(command, dryRun, 'scrape-payload requires <payload-json>');
}
let payload: ScrapePayload;
try {
payload = validatePayloadJson(rawPayload);
} catch (error) {
return failed(command, dryRun, error instanceof Error ? error.message : String(error));
}
return runScrape(command, dryRun, config, ecomBase, payload);
}
async function runScrape(
client: ReturnType<typeof createSkillClient>,
command: string,
dryRun: boolean,
config: ReturnType<typeof createEnvConfig>,
ecomBase: string,
payload: ScrapePayload,
): Promise<OutputResult> {
if (dryRun) {
return { status: 'success', error: null, command, dryRun, requestPayload: payload, scrapeHttpStatus: 0, scrapeBody: null };
}
let accessToken: string;
try {
accessToken = await getAccessToken(dryRun, config);
} catch (error) {
return failed(command, dryRun, error instanceof Error ? error.message : 'failed to get access token');
}
const result = await scrapeProduct(config, ecomBase, payload, dryRun, accessToken);
const result = await client.post('/ecom/tasks/scrape', payload);
if (result.status < 200 || result.status >= 300) {
return failed(command, dryRun, `scrape failed: HTTP ${result.status}: ${result.body}`, payload, result.status);

View File

@@ -1,6 +1,4 @@
import type { ScrapePayload } from './types.js';
import { requestApiWithAutoRefresh } from '@clawd/auth-runtime';
import type { ApiResponse, EnvConfig } from '@clawd/auth-runtime';
type Defaults = {
optimizeImages: boolean;
@@ -59,23 +57,6 @@ export function validatePayloadJson(raw: string): ScrapePayload {
};
}
export async function scrapeProduct(
config: EnvConfig,
ecomBase: string,
payload: ScrapePayload,
dryRun: boolean,
accessToken?: string,
): Promise<ApiResponse> {
return requestApiWithAutoRefresh(
'POST',
`${ecomBase}/ecom/tasks/scrape`,
dryRun,
config,
JSON.stringify(payload),
accessToken,
);
}
function parseBoolean(value: unknown): boolean {
const str = String(value).trim().toLowerCase();
return ['1', 'true', 'yes', 'y'].includes(str);

View File

@@ -6,12 +6,6 @@ export interface ScrapePayload {
needTranslate: boolean;
}
export interface ScrapeResponse {
[key: string]: unknown;
}
export type ApiResponse = import("@clawd/auth-runtime").ApiResponse;
export type Command = "session" | "scrape-url" | "scrape-payload";
export interface OutputResult {