Work on Postgres (will probably be removed) and work on IB exchanges and symbols

This commit is contained in:
Boki 2025-06-13 19:59:35 -04:00
parent cce5126cb7
commit a20a11c1aa
16 changed files with 1441 additions and 95 deletions

6
.env
View file

@ -27,9 +27,9 @@ DRAGONFLY_PASSWORD=
# PostgreSQL Configuration
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=stockbot
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DATABASE=trading_bot
POSTGRES_USERNAME=trading_user
POSTGRES_PASSWORD=trading_pass_dev
POSTGRES_SSL=false
# QuestDB Configuration

View file

@ -4,7 +4,7 @@
import { Hono } from 'hono';
import { Browser } from '@stock-bot/browser';
import { loadEnvVariables } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { getLogger, shutdownLoggers } from '@stock-bot/logger';
import { Shutdown } from '@stock-bot/shutdown';
import { initializeIBResources } from './providers/ib.tasks';
import { initializeProxyResources } from './providers/proxy.tasks';
@ -99,7 +99,38 @@ shutdown.onShutdown(async () => {
logger.info('Queue manager shut down successfully');
} catch (error) {
logger.error('Error shutting down queue manager', { error });
throw error; // Re-throw to mark shutdown as failed
// Don't re-throw to allow other shutdown handlers to complete
// The shutdown library tracks failures internally
}
});
// Add Browser shutdown handler
// Closes the shared browser instance. Registered after the queue-manager
// handler above so queued work is stopped before the browser goes away.
shutdown.onShutdown(async () => {
logger.info('Shutting down browser resources...');
try {
await Browser.close();
logger.info('Browser resources shut down successfully');
} catch (error) {
// Browser might already be closed by running tasks, this is expected
const errorMessage = error instanceof Error ? error.message : String(error);
// This message appears to be Playwright's "already closed" error -- treat it
// as a benign race rather than a shutdown failure. TODO confirm message text
// is stable across Playwright versions.
if (errorMessage.includes('Target page, context or browser has been closed')) {
logger.info('Browser was already closed by running tasks');
} else {
logger.error('Error shutting down browser resources', { error });
}
// Don't throw here as browser shutdown shouldn't block app shutdown
}
});
// Add logger shutdown handler (should be last)
// Flushes and closes all logger transports. After this runs nothing may use
// `logger`, hence the raw process.stdout/stderr writes below.
shutdown.onShutdown(async () => {
try {
await shutdownLoggers();
// Use process.stdout since loggers are being shut down
process.stdout.write('All loggers flushed and shut down successfully\n');
} catch (error) {
process.stderr.write(`Error shutting down loggers: ${error}\n`);
// Don't throw here as this is the final cleanup
}
});

View file

@ -6,21 +6,26 @@ const logger = getLogger('ib-provider');
export const ibProvider: ProviderConfig = {
name: 'ib',
operations: {
'ib-symbol-summary': async () => {
'ib-basics': async () => {
const { ibTasks } = await import('./ib.tasks');
logger.info('Fetching symbol summary from IB');
const total = await ibTasks.fetchSymbolSummary();
const sessionHeaders = await ibTasks.fetchSession();
logger.info('Fetched symbol summary from IB', {
count: total,
sessionHeaders,
});
return total;
// Get Exchanges
logger.info('Fetching exchanges from IB');
const exchanges = await ibTasks.fetchExchanges(sessionHeaders);
logger.info('Fetched exchanges from IB', { exchanges });
// return total;
},
},
scheduledJobs: [
{
type: 'ib-symbol-summary',
operation: 'ib-symbol-summary',
type: 'ib-basics',
operation: 'ib-basics',
payload: {},
// should remove and just run at the same time so app restarts dont keeping adding same jobs
cronPattern: '*/2 * * * *',

View file

@ -32,121 +32,138 @@ export async function initializeIBResources(waitForCache = false): Promise<void>
isInitialized = true;
}
export async function fetchSymbolSummary(): Promise<number> {
export async function fetchSession(): Promise<Record<string, string> | undefined> {
try {
await Browser.initialize({ headless: true, timeout: 10000, blockResources: false });
logger.info('✅ Browser initialized');
const { page, contextId } = await Browser.createPageWithProxy(
const { page } = await Browser.createPageWithProxy(
'https://www.interactivebrokers.com/en/trading/products-exchanges.php#/',
'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80'
);
logger.info('✅ Page created with proxy');
let summaryData: any = null; // Initialize summaryData to store API response
let eventCount = 0;
page.onNetworkEvent(event => {
if (event.url.includes('/webrest/search/product-types/summary')) {
console.log(`🎯 Found summary API call: ${event.type} ${event.url}`);
if (event.type === 'response' && event.responseData) {
console.log(`📊 Summary API Response Data: ${event.responseData}`);
try {
summaryData = JSON.parse(event.responseData) as any;
const totalCount = summaryData[0].totalCount;
console.log('📊 Summary API Response:', JSON.stringify(summaryData, null, 2));
console.log(`🔢 Total symbols found: ${totalCount || 'Unknown'}`);
} catch (e) {
console.log('📊 Raw Summary Response:', event.responseData);
const headersPromise = new Promise<Record<string, string> | undefined>(resolve => {
let resolved = false;
page.onNetworkEvent(event => {
if (event.url.includes('/webrest/search/product-types/summary')) {
if (event.type === 'request') {
try {
resolve(event.headers);
} catch (e) {
resolve(undefined);
console.log('📊 Raw Summary Response:', (e as Error).message);
}
}
}
}
eventCount++;
logger.info(`📡 Event ${eventCount}: ${event.type} ${event.url}`);
});
// Timeout fallback
setTimeout(() => {
if (!resolved) {
resolved = true;
logger.warn('Timeout waiting for headers');
resolve(undefined);
}
}, 30000);
});
logger.info('⏳ Waiting for page load...');
await page.waitForLoadState('domcontentloaded', { timeout: 20000 });
logger.info('✅ Page loaded');
// RIGHT HERE - Interact with the page to find Stocks checkbox and Apply button
//Products tabs
logger.info('🔍 Looking for Products tab...');
// Wait for the page to fully load
await page.waitForTimeout(20000);
// First, click on the Products tab
const productsTab = page.locator('#productSearchTab[role="tab"][href="#products"]');
await productsTab.waitFor({ timeout: 20000 });
await productsTab.waitFor({ timeout: 5000 });
logger.info('✅ Found Products tab');
logger.info('🖱️ Clicking Products tab...');
await productsTab.click();
logger.info('✅ Products tab clicked');
// Wait for the tab content to load
await page.waitForTimeout(5000);
// New Products Checkbox
logger.info('🔍 Looking for "New Products Only" radio button...');
const radioButton = page.locator('span.checkbox-text:has-text("New Products Only")');
await radioButton.waitFor({ timeout: 5000 });
logger.info(`🎯 Found "New Products Only" radio button`);
await radioButton.first().click();
logger.info('✅ "New Products Only" radio button clicked');
// Click on the Asset Classes accordion to expand it
logger.info('🔍 Looking for Asset Classes accordion...');
const assetClassesAccordion = page.locator(
'#products .accordion-item #acc-products .accordion_btn:has-text("Asset Classes")'
);
await assetClassesAccordion.waitFor({ timeout: 10000 });
logger.info('✅ Found Asset Classes accordion');
// Wait for and return headers immediately when captured
logger.info('⏳ Waiting for headers to be captured...');
const headers = await headersPromise;
logger.info('🖱️ Clicking Asset Classes accordion...');
await assetClassesAccordion.click();
logger.info('✅ Asset Classes accordion clicked');
// Wait for the accordion content to expand
await page.waitForTimeout(2000);
logger.info('🔍 Looking for Stocks checkbox...');
// Find the span with class "fs-7 checkbox-text" and inner text containing "Stocks"
const stocksSpan = page.locator('span.fs-7.checkbox-text:has-text("Stocks")');
await stocksSpan.waitFor({ timeout: 10000 });
logger.info('✅ Found Stocks span');
// Find the checkbox by looking in the same parent container
const parentContainer = stocksSpan.locator('..');
const checkbox = parentContainer.locator('input[type="checkbox"]');
if ((await checkbox.count()) > 0) {
logger.info('📋 Clicking Stocks checkbox...');
await checkbox.first().check();
logger.info('✅ Stocks checkbox checked');
if (headers) {
logger.info('✅ Headers captured successfully');
} else {
logger.info('⚠️ Could not find checkbox near Stocks text');
logger.warn('⚠️ No headers were captured');
}
// Wait a moment for any UI updates
await page.waitForTimeout(1000);
// Find and click the nearest Apply button
logger.info('🔍 Looking for Apply button...');
const applyButton = page.locator(
'button:has-text("Apply"), input[type="submit"][value*="Apply"], input[type="button"][value*="Apply"]'
);
if ((await applyButton.count()) > 0) {
logger.info('🎯 Clicking Apply button...');
await applyButton.first().click();
logger.info('✅ Apply button clicked');
// Wait for any network requests triggered by the Apply button
await page.waitForTimeout(2000);
} else {
logger.info('⚠️ Could not find Apply button');
}
return 0;
return headers;
} catch (error) {
logger.error('Failed to fetch IB symbol summary', { error });
return 0;
return;
}
}
/**
 * Fetches the IB exchange list from the webrest API, replaying the session
 * headers captured by fetchSession() so the request resembles the in-page call.
 *
 * @param sessionHeaders - headers captured from the browser's summary request
 * @returns parsed JSON exchange payload, or null on any failure (errors are
 *          logged here, never thrown)
 */
export async function fetchExchanges(sessionHeaders: Record<string, string>): Promise<any> {
try {
logger.info('🔍 Fetching exchanges with session headers...');
// The URL for the exchange data API
const exchangeUrl = 'https://www.interactivebrokers.com/webrest/exchanges';
// Configure the proxy
// NOTE(review): proxy credentials are hardcoded in source -- move them to
// config/env before this is shared or deployed.
const proxyUrl = 'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80';
// Prepare headers - include all session headers plus any additional ones
const requestHeaders = {
...sessionHeaders,
Accept: 'application/json, text/plain, */*',
'Accept-Language': 'en-US,en;q=0.9',
'Cache-Control': 'no-cache',
Pragma: 'no-cache',
'Sec-Fetch-Dest': 'empty',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Site': 'same-origin',
'X-Requested-With': 'XMLHttpRequest',
};
logger.info('📤 Making request to exchange API...', {
url: exchangeUrl,
headerCount: Object.keys(requestHeaders).length,
});
// Use fetch with proxy configuration
// NOTE(review): `proxy` is a Bun-specific fetch option, not part of the
// standard RequestInit -- confirm this service always runs under Bun.
const response = await fetch(exchangeUrl, {
method: 'GET',
headers: requestHeaders,
proxy: proxyUrl,
});
if (!response.ok) {
logger.error('❌ Exchange API request failed', {
status: response.status,
statusText: response.statusText,
});
return null;
}
const data = await response.json();
logger.info('✅ Exchange data fetched successfully', {
dataKeys: Object.keys(data || {}),
dataSize: JSON.stringify(data).length,
});
return data;
} catch (error) {
logger.error('❌ Failed to fetch exchanges', { error });
return null;
}
}
// Optional: Export a convenience object that groups related tasks
export const ibTasks = {
fetchSymbolSummary,
fetchSession,
fetchExchanges,
};

View file

@ -4,8 +4,10 @@
"": {
"name": "stock-bot",
"dependencies": {
"@types/pg": "^8.15.4",
"bullmq": "^5.53.2",
"ioredis": "^5.6.1",
"pg": "^8.16.0",
"playwright": "^1.53.0",
},
"devDependencies": {
@ -163,6 +165,21 @@
"typescript": "^5.0.0",
},
},
"libs/browser": {
"name": "@stock-bot/browser",
"version": "1.0.0",
"dependencies": {
"playwright": "^1.53.0",
},
"devDependencies": {
"@types/node": "^20.0.0",
"typescript": "^5.0.0",
},
"peerDependencies": {
"@stock-bot/http": "workspace:*",
"@stock-bot/logger": "workspace:*",
},
},
"libs/cache": {
"name": "@stock-bot/cache",
"version": "1.0.0",
@ -312,6 +329,14 @@
"typescript": "^5.3.0",
},
},
"libs/proxy": {
"name": "@stock-bot/proxy",
"version": "1.0.0",
"devDependencies": {
"@types/node": "^20.0.0",
"typescript": "^5.0.0",
},
},
"libs/questdb-client": {
"name": "@stock-bot/questdb-client",
"version": "1.0.0",
@ -815,6 +840,8 @@
"@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="],
"@stock-bot/browser": ["@stock-bot/browser@workspace:libs/browser"],
"@stock-bot/cache": ["@stock-bot/cache@workspace:libs/cache"],
"@stock-bot/config": ["@stock-bot/config@workspace:libs/config"],
@ -841,6 +868,8 @@
"@stock-bot/processing-service": ["@stock-bot/processing-service@workspace:apps/processing-service"],
"@stock-bot/proxy": ["@stock-bot/proxy@workspace:libs/proxy"],
"@stock-bot/questdb-client": ["@stock-bot/questdb-client@workspace:libs/questdb-client"],
"@stock-bot/shutdown": ["@stock-bot/shutdown@workspace:libs/shutdown"],
@ -2379,6 +2408,8 @@
"@parcel/watcher/node-addon-api": ["node-addon-api@7.1.1", "", {}, "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ=="],
"@stock-bot/browser/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="],
"@stock-bot/cache/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="],
"@stock-bot/config/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="],
@ -2419,6 +2450,8 @@
"@stock-bot/postgres-client/eslint": ["eslint@8.57.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", "@eslint/js": "8.57.1", "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", "eslint-visitor-keys": "^3.4.3", "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA=="],
"@stock-bot/proxy/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="],
"@stock-bot/questdb-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="],
"@stock-bot/questdb-client/@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@6.21.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.5.1", "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/type-utils": "6.21.0", "@typescript-eslint/utils": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", "natural-compare": "^1.4.0", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA=="],

View file

@ -0,0 +1,51 @@
-- =============================================================================
-- Interactive Brokers Simple Schema Setup
-- =============================================================================
-- Idempotent setup: every statement below uses IF NOT EXISTS or
-- DEFAULT PRIVILEGES, so this file can be re-run against an existing database.
-- Create dedicated schema for IB data
CREATE SCHEMA IF NOT EXISTS ib_data;
-- =============================================================================
-- Simple Exchanges Table
-- =============================================================================
-- One row per IB exchange; populated by the setup-ib.ts batch upsert keyed
-- on exchange_code.
CREATE TABLE IF NOT EXISTS ib_data.exchanges (
id SERIAL PRIMARY KEY,
-- IB's short exchange identifier (e.g. "NASDAQ"); the upsert conflict key
exchange_code VARCHAR(20) NOT NULL UNIQUE,
exchange_name TEXT NOT NULL,
country VARCHAR(100),
region VARCHAR(50),
-- country code -- presumably ISO alpha-2/alpha-3; column allows up to 3 chars
country_code VARCHAR(3),
-- raw asset list text as provided by IB -- TODO confirm format
assets TEXT,
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
-- NOTE(review): no trigger maintains updated_at; writers must set it
-- themselves (the batchUpsert path does so via "updated_at = NOW()")
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Create indexes for performance
CREATE INDEX IF NOT EXISTS idx_exchanges_code ON ib_data.exchanges(exchange_code);
CREATE INDEX IF NOT EXISTS idx_exchanges_country ON ib_data.exchanges(country_code);
CREATE INDEX IF NOT EXISTS idx_exchanges_region ON ib_data.exchanges(region);
CREATE INDEX IF NOT EXISTS idx_exchanges_active ON ib_data.exchanges(is_active);
-- =============================================================================
-- Permissions
-- =============================================================================
-- NOTE(review): granting to PUBLIC gives every database role full DML on this
-- schema -- confirm this is intended outside local development.
-- Grant usage on schema
GRANT USAGE ON SCHEMA ib_data TO PUBLIC;
-- Grant permissions on tables
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA ib_data TO PUBLIC;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA ib_data TO PUBLIC;
-- Set default permissions for future tables
ALTER DEFAULT PRIVILEGES IN SCHEMA ib_data GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO PUBLIC;
ALTER DEFAULT PRIVILEGES IN SCHEMA ib_data GRANT USAGE, SELECT ON SEQUENCES TO PUBLIC;
-- =============================================================================
-- Comments
-- =============================================================================
COMMENT ON SCHEMA ib_data IS 'Interactive Brokers market data schema (simplified)';
COMMENT ON TABLE ib_data.exchanges IS 'Trading exchanges from Interactive Brokers';

View file

@ -0,0 +1,104 @@
# Interactive Brokers Database Setup
This directory contains the PostgreSQL schema setup for Interactive Brokers data.
## Quick Setup
### 1. **Create the Schema and Tables**
```bash
# Run the SQL schema setup
bun run db:setup
# Or manually with psql:
psql -U postgres -d stock_bot -f database/postgres/providers/01-ib.sql
```
### 2. **Populate with Exchange Data**
```bash
# Populate exchanges from ib-exchanges.json
bun run db:populate-ib
# Or run the complete setup (schema + data):
bun run db:setup-ib
```
## What Gets Created
### 📊 **Schema: `ib_data`**
- `exchanges` - All IB trading exchanges with metadata
- `asset_types` - Types of financial instruments (Stocks, Options, etc.)
- `exchange_assets` - Many-to-many mapping of exchanges to asset types
- `securities` - Individual tradeable instruments
- `market_data` - Real-time and historical price data
- `data_fetch_jobs` - Queue for data collection tasks
### 🔍 **Views**
- `exchanges_with_assets` - Exchanges with their supported asset types
- `latest_market_data` - Most recent market data per security
- `securities_full_view` - Securities with full exchange and asset type info
### ⚡ **Functions**
- `get_or_create_exchange()` - Utility to insert/update exchanges
- `add_assets_to_exchange()` - Parse and add asset types to exchanges
## Database Structure
```sql
-- Example queries you can run after setup:
-- View all exchanges with their supported assets
SELECT * FROM ib_data.exchanges_with_assets LIMIT 10;
-- Count exchanges by region
SELECT region, COUNT(*)
FROM ib_data.exchanges
GROUP BY region
ORDER BY COUNT(*) DESC;
-- Find exchanges that support stocks
SELECT e.exchange_code, e.exchange_name, e.country
FROM ib_data.exchanges e
JOIN ib_data.exchange_assets ea ON e.id = ea.exchange_id
JOIN ib_data.asset_types at ON ea.asset_type_id = at.id
WHERE at.code = 'Stocks'
ORDER BY e.exchange_code;
```
## Environment Variables
Set these in your `.env` file for the populate script:
```bash
DB_HOST=localhost
DB_PORT=5432
DB_NAME=stock_bot
DB_USER=postgres
DB_PASSWORD=your_password
```
## Integration with Your Code
The schema is designed to work with your existing `ib.tasks.ts` file:
```typescript
// Your fetchSession() function can now store data like:
import { query } from '@stock-bot/postgres-client';
// Create a fetch job
await query(`
INSERT INTO ib_data.data_fetch_jobs (job_type, status, metadata)
VALUES ('SYMBOL_SUMMARY', 'PENDING', $1)
`, [{ url: 'https://...', proxy: '...' }]);
// Store exchange data
const exchangeId = await query(`
SELECT ib_data.get_or_create_exchange($1, $2, $3, $4, $5)
`, ['NASDAQ', 'NASDAQ Global Select Market', 'United States', 'Americas', 'US']);
```
## Next Steps
1. ✅ Run the setup scripts
2. 🔧 Update your IB tasks to use the database
3. 📊 Start collecting market data
4. 🚀 Build your trading strategies on top of this data layer!

View file

@ -0,0 +1,56 @@
# Database Scripts
**Simplified database initialization system for Interactive Brokers data.**
## Quick Start
```bash
# Initialize everything (recommended)
bun run db:init
# Or run Interactive Brokers setup directly:
bun run db:setup-ib # Create schema and populate IB data
```
## What We Built
- **Simplified from complex multi-table schema to exchanges-only**
- **Single script setup** - `setup-ib.ts` handles both schema and data
- **Structured logging** with `@stock-bot/logger`
- **184 exchanges populated** from JSON data
- **Proper error handling** with helpful troubleshooting messages
## Scripts
### `setup-ib.ts` - Interactive Brokers Complete Setup
**Main script for IB setup** - Sets up schema and populates exchange data in one go.
### `init.ts`
Main initialization script that orchestrates setup for all providers.
## Database Schema
### IB Data (`ib_data` schema)
- `exchanges` - Trading exchanges with metadata
- `upsert_exchange()` - Function to insert/update exchanges
## Package.json Commands
```json
{
"db:init": "Run complete database initialization",
"db:setup-ib": "Complete IB setup (schema + data)"
}
```
## Adding New Providers
1. Create `{provider}.sql` in `database/postgres/providers/`
2. Create `{provider}.ts` script
3. Add to `init.ts` and `package.json`
## Requirements
- PostgreSQL running
- Database configured in `.env`
- `ib-exchanges.json` file in `apps/data-service/src/setup/`

View file

@ -0,0 +1,41 @@
#!/usr/bin/env bun
/**
* Main database initialization script
* Sets up the database schema and populates with initial data
*/
import { getLogger } from '@stock-bot/logger';
import { setupIB } from './setup-ib';
// Module-scoped logger for this script.
const logger = getLogger('db-init');
/**
 * Entry point: runs every provider's database setup in sequence and exits
 * the process with code 1 if any step fails.
 */
async function main() {
logger.info('Starting database initialization');
try {
await initializeProviders();
logger.info('Database initialization completed successfully');
} catch (error) {
logger.error('Database initialization failed', { error });
process.exit(1);
}
}
/**
 * Runs setup for each provider, in order. Add new providers here.
 */
async function initializeProviders() {
// Step 1: Setup Interactive Brokers (schema + data)
logger.info('Setting up Interactive Brokers (schema + data)');
await setupIB();
logger.info('IB setup completed');
// Future providers can be added here:
// await setupAlpaca();
// await setupPolygon();
}
// Run the script only when invoked directly (Bun sets import.meta.main);
// importing initDatabase from another module does not trigger a run.
if (import.meta.main) {
void main().catch(error => {
console.error('Init script failed:', error);
process.exit(1);
});
}
export { main as initDatabase };

View file

@ -0,0 +1,366 @@
#!/usr/bin/env bun
/**
* Interactive Brokers complete setup script
* Sets up schema and populates IB exchanges from ib-exchanges.json into PostgreSQL
*/
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { PostgreSQLClient } from '@stock-bot/postgres-client';
import { readFileSync } from 'fs';
import { join } from 'path';
// Initialize logger
const logger = getLogger('ib-setup');
// Type definitions based on the JSON structure
// Shape of one entry in apps/data-service/src/setup/ib-exchanges.json.
interface IBExchange {
// IB's short exchange code (e.g. "NASDAQ"); used as the unique
// exchange_code key when upserting into ib_data.exchanges
id: string;
name: string;
country: string;
region: string;
// raw asset list text as provided by IB -- TODO confirm format
assets: string;
country_code: string;
}
/**
 * Opens a PostgreSQLClient connection using postgresConfig and verifies it
 * with a `SELECT version()` round-trip.
 *
 * @returns a connected client (caller is responsible for disconnect())
 * @throws rethrows any connection/query error after logging it
 */
async function connectToDatabase(): Promise<PostgreSQLClient> {
logger.info('Connecting to PostgreSQL', {
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
database: postgresConfig.POSTGRES_DATABASE
});
try {
const client = new PostgreSQLClient();
await client.connect();
logger.info('Connected to PostgreSQL database');
// Test the connection and log the server version.
// version() returns e.g. "PostgreSQL 15.3 on x86_64-...", so take the first
// two tokens; the old `[0]` alone could only ever log the word "PostgreSQL".
const result = await client.query('SELECT version()');
const version = result.rows[0].version.split(' ').slice(0, 2).join(' ');
logger.info('PostgreSQL connection verified', { version });
return client;
} catch (error) {
logger.error('Failed to connect to PostgreSQL', { error });
throw error;
}
}
/**
 * Loads database/postgres/providers/01-ib.sql and executes it, then verifies
 * the resulting schema. "already exists" errors are tolerated; anything else
 * is logged and rethrown.
 */
async function runSchemaSetup(client: PostgreSQLClient) {
try {
logger.info('Loading schema SQL file');
const sqlPath = join(process.cwd(), 'database/postgres/providers/01-ib.sql');
const sql = readFileSync(sqlPath, 'utf-8');
logger.info('Executing schema setup');
// Run the whole file in a single query() call so multi-line function
// bodies are not split apart.
try {
await client.query(sql);
logger.info('Schema setup completed successfully');
} catch (error: unknown) {
const message = error instanceof Error ? error.message : String(error);
if (!message.includes('already exists')) {
logger.error('Error executing schema setup', { error: message });
throw error;
}
// Objects left over from a previous run are fine -- setup is idempotent.
logger.info('Schema setup completed (some objects already existed)');
}
// Verify the setup
await verifySchemaSetup(client);
} catch (error) {
logger.error('Schema setup failed', { error });
throw error;
}
}
/**
 * Confirms the ib_data schema exists and logs its tables and functions.
 * @throws Error when the ib_data schema is missing; rethrows query failures.
 */
async function verifySchemaSetup(client: PostgreSQLClient) {
logger.info('Verifying schema setup');
try {
// The schema itself must exist before anything else is worth checking.
const schemas = await client.query(`
SELECT schema_name
FROM information_schema.schemata
WHERE schema_name = 'ib_data'
`);
if (!schemas.rows.length) {
throw new Error('ib_data schema was not created');
}
const tables = await client.query(`
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'ib_data'
ORDER BY table_name
`);
const functions = await client.query(`
SELECT routine_name
FROM information_schema.routines
WHERE routine_schema = 'ib_data'
ORDER BY routine_name
`);
logger.info('Schema verification completed', {
schema: 'ib_data',
tables: tables.rows.map((row: any) => row.table_name),
functions: functions.rows.map((row: any) => row.routine_name)
});
} catch (error) {
logger.error('Schema verification failed', { error });
throw error;
}
}
/**
 * Reads ib-exchanges.json, drops incomplete entries, and dedupes by exchange
 * id (first occurrence wins).
 * @returns unique, valid exchange records
 * @throws on unreadable file or invalid JSON
 */
async function loadExchangesData(): Promise<IBExchange[]> {
try {
// Look for the JSON file in the project root
const jsonPath = join(process.cwd(), 'apps/data-service/src/setup/ib-exchanges.json');
logger.info('Loading exchanges from file', { path: jsonPath });
// Strip any full-line // comments before parsing.
const raw = readFileSync(jsonPath, 'utf-8').replace(/^\/\/.*$/gm, '');
const all: IBExchange[] = JSON.parse(raw);
// An entry is usable only when id, name and country_code are non-blank.
const isComplete = (ex: IBExchange) =>
Boolean(ex.id && ex.name && ex.country_code) &&
ex.id.trim() !== '' &&
ex.name.trim() !== '' &&
ex.country_code.trim() !== '';
const valid = all.filter(isComplete);
// Deduplicate by exchange code, keeping the first occurrence.
const byId = new Map<string, IBExchange>();
for (const ex of valid) {
if (!byId.has(ex.id)) {
byId.set(ex.id, ex);
}
}
const unique = [...byId.values()];
logger.info('Exchanges loaded successfully', {
totalExchanges: all.length,
validExchanges: valid.length,
uniqueExchanges: unique.length,
duplicatesRemoved: valid.length - unique.length,
filteredOut: all.length - valid.length
});
if (valid.length !== all.length) {
logger.warn('Some exchanges were filtered out due to incomplete data', {
filteredCount: all.length - valid.length
});
}
if (unique.length !== valid.length) {
logger.warn('Duplicate exchange codes found and removed', {
duplicateCount: valid.length - unique.length
});
}
return unique;
} catch (error) {
logger.error('Error loading exchanges JSON', { error });
throw error;
}
}
/**
 * Upserts all exchanges into ib_data.exchanges via batchUpsert, keyed on
 * exchange_code, in chunks of 100. Rethrows on failure after logging.
 */
async function populateExchanges(client: PostgreSQLClient, exchanges: IBExchange[]): Promise<void> {
logger.info('Starting batch exchange population', {
totalExchanges: exchanges.length
});
try {
// Map the JSON field names onto the table's column names; absent optional
// fields become NULL rather than empty strings.
const rows = exchanges.map(ex => ({
exchange_code: ex.id,
exchange_name: ex.name,
country: ex.country || null,
region: ex.region || null,
country_code: ex.country_code,
assets: ex.assets || null
}));
const result = await client.batchUpsert('ib_data.exchanges', rows, 'exchange_code', {
chunkSize: 100
});
logger.info('Batch exchange population completed', {
insertedCount: result.insertedCount,
updatedCount: result.updatedCount,
totalProcessed: result.insertedCount + result.updatedCount
});
} catch (error) {
logger.error('Batch exchange population failed', { error });
throw error;
}
}
/**
 * Reads back the populated exchanges and logs a summary: total count,
 * per-region breakdown, and a small sample. Rethrows on query failure.
 */
async function verifyData(client: PostgreSQLClient) {
logger.info('Verifying populated data');
try {
// Count exchanges
const countResult = await client.query(`
SELECT COUNT(*) as count FROM ib_data.exchanges
`);
// Get exchanges by region
const regionResult = await client.query(`
SELECT region, COUNT(*) as count
FROM ib_data.exchanges
WHERE region IS NOT NULL
GROUP BY region
ORDER BY count DESC
`);
// Get sample exchanges
const sampleResult = await client.query(`
SELECT
exchange_code,
exchange_name,
country,
region,
country_code,
assets
FROM ib_data.exchanges
ORDER BY exchange_code
LIMIT 10
`);
logger.info('Data verification completed', { totalExchanges: countResult.rows[0].count });
if (regionResult.rows.length > 0) {
logger.info('Exchanges by region', {
regions: regionResult.rows.map((row: any) => ({
region: row.region,
count: row.count
}))
});
}
// Only log the first five of the ten sampled rows to keep output compact.
logger.info('Sample exchanges', {
samples: sampleResult.rows.slice(0, 5).map((row: any) => ({
code: row.exchange_code,
name: row.exchange_name,
country: row.country,
region: row.region,
assets: row.assets
}))
});
} catch (error) {
logger.error('Data verification failed', { error });
throw error;
}
}
/**
 * Complete IB setup entry point: connects, creates the schema, loads the
 * exchange JSON, batch-upserts it, and verifies the result.
 *
 * On failure this logs troubleshooting hints and exits the process with
 * code 1 -- but only AFTER the connection has been closed. (The original
 * called process.exit() inside the catch; process.exit terminates the
 * process immediately and skips `finally`, so the pool was never closed
 * on the error path.)
 */
async function main() {
logger.info('Starting Interactive Brokers complete setup (schema + data)');
logger.info('Database configuration', {
database: postgresConfig.POSTGRES_DATABASE,
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
user: postgresConfig.POSTGRES_USERNAME,
ssl: postgresConfig.POSTGRES_SSL
});
let client: PostgreSQLClient | null = null;
let failed = false;
try {
// Connect to database
client = await connectToDatabase();
// Step 1: Setup schema
logger.info('Step 1: Setting up database schema');
await runSchemaSetup(client);
// Step 2: Load exchange data
logger.info('Step 2: Loading exchange data');
const exchanges = await loadExchangesData();
if (exchanges.length === 0) {
logger.warn('No valid exchanges found to process');
return;
}
// Step 3: Populate exchanges with batch upsert
logger.info('Step 3: Populating exchanges (batch mode)');
await populateExchanges(client, exchanges);
// Step 4: Verify the data
logger.info('Step 4: Verifying setup and data');
await verifyData(client);
logger.info('Interactive Brokers setup completed successfully');
logger.info('Next steps', {
suggestions: [
'Start your data service',
'Begin collecting market data',
'Connect to Interactive Brokers API'
]
});
} catch (error: unknown) {
logger.error('IB setup failed', { error });
logTroubleshooting(error);
// Defer process.exit until after the finally block has run.
failed = true;
} finally {
if (client) {
try {
await client.disconnect();
logger.info('Database connection closed');
} catch (disconnectError) {
// Don't let a disconnect failure mask the original outcome.
logger.error('Error closing database connection', { error: disconnectError });
}
}
}
if (failed) {
process.exit(1);
}
}
/**
 * Logs targeted troubleshooting hints for the most common setup failures.
 */
function logTroubleshooting(error: unknown): void {
if (error && typeof error === 'object' && 'code' in error && error.code === 'ECONNREFUSED') {
logger.error('Database connection refused', {
troubleshooting: [
'Make sure PostgreSQL is running',
'Check your database configuration in .env file',
'Verify the database connection details'
]
});
} else if (error && typeof error === 'object' && 'message' in error &&
typeof error.message === 'string' &&
error.message.includes('database') &&
error.message.includes('does not exist')) {
logger.error('Database does not exist', {
suggestion: `Create database first: createdb ${postgresConfig.POSTGRES_DATABASE}`
});
}
}
// Run the script only when invoked directly (Bun sets import.meta.main);
// importing setupIB from init.ts does not trigger a run.
if (import.meta.main) {
void main().catch(error => {
console.error('IB setup script failed:', error);
process.exit(1);
});
}
export { main as setupIB };

View file

@ -1,4 +1,4 @@
import { QueryResult as PgQueryResult, Pool, PoolClient, QueryResultRow } from 'pg';
import { Pool, QueryResultRow } from 'pg';
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { PostgreSQLHealthMonitor } from './health';
@ -204,6 +204,99 @@ export class PostgreSQLClient {
return await this.query<T>(query, params);
}
/**
 * Batch upsert (multi-row `INSERT ... ON CONFLICT ... DO UPDATE`) for
 * high-throughput loads.
 *
 * Columns are derived from the first row, so all rows must share that shape.
 * The target table is assumed to have an `updated_at` column, which is bumped
 * to NOW() on conflict. Work is chunked both by the caller-supplied
 * `chunkSize` and by PostgreSQL's 65535 bind-parameter-per-statement limit.
 *
 * @param tableName      Target table name (plain, unqualified identifier).
 * @param data           Rows to upsert; object keys become column names.
 * @param conflictColumn Column carrying the unique constraint for ON CONFLICT.
 * @param options        chunkSize (default 1000) and excludeColumns.
 * @returns Counts of rows inserted vs. updated across all chunks.
 * @throws Error if the client is not connected, an identifier is unsafe,
 *         no columns remain after exclusion, or any chunk's query fails.
 */
async batchUpsert(
  tableName: string,
  data: Record<string, unknown>[],
  conflictColumn: string,
  options: {
    chunkSize?: number;
    excludeColumns?: string[];
  } = {}
): Promise<{ insertedCount: number; updatedCount: number }> {
  if (!this.pool) {
    throw new Error('PostgreSQL client not connected');
  }
  if (data.length === 0) {
    return { insertedCount: 0, updatedCount: 0 };
  }
  const { chunkSize = 1000, excludeColumns = [] } = options;
  const columns = Object.keys(data[0]).filter(col => !excludeColumns.includes(col));
  if (columns.length === 0) {
    throw new Error('batchUpsert: no columns left to write after exclusions');
  }
  // Identifiers cannot be bound as query parameters, so they are interpolated
  // into the SQL text. Restrict them to plain names to rule out injection.
  const identifierPattern = /^[A-Za-z_][A-Za-z0-9_]*$/;
  for (const identifier of [tableName, conflictColumn, ...columns]) {
    if (!identifierPattern.test(identifier)) {
      throw new Error(`batchUpsert: invalid SQL identifier "${identifier}"`);
    }
  }
  const updateColumns = columns.filter(col => col !== conflictColumn);
  // Building the full SET list up front also fixes the case where only the
  // conflict column survives: the previous string concatenation emitted
  // "DO UPDATE SET , updated_at = NOW()" — a SQL syntax error.
  const updateClauses = [
    ...updateColumns.map(col => `${col} = EXCLUDED.${col}`),
    'updated_at = NOW()',
  ];
  // PostgreSQL caps a single statement at 65535 bind parameters; shrink the
  // chunk size when wide rows would otherwise exceed that limit.
  const maxRowsPerStatement = Math.max(1, Math.floor(65535 / columns.length));
  const effectiveChunkSize = Math.min(chunkSize, maxRowsPerStatement);
  let totalInserted = 0;
  let totalUpdated = 0;
  // Process in chunks to bound memory use and parameter counts.
  for (let i = 0; i < data.length; i += effectiveChunkSize) {
    const chunk = data.slice(i, i + effectiveChunkSize);
    // One "($1, $2, ...)" group per row in this chunk.
    const placeholders = chunk.map((_, rowIndex) => {
      const rowPlaceholders = columns.map((_, colIndex) => {
        return `$${rowIndex * columns.length + colIndex + 1}`;
      });
      return `(${rowPlaceholders.join(', ')})`;
    });
    // Flatten the row objects into one positional parameter list.
    const values = chunk.flatMap(row => columns.map(col => row[col]));
    const query = `
      INSERT INTO ${tableName} (${columns.join(', ')})
      VALUES ${placeholders.join(', ')}
      ON CONFLICT (${conflictColumn})
      DO UPDATE SET
        ${updateClauses.join(', ')}
      RETURNING (xmax = 0) AS is_insert
    `;
    try {
      const startTime = Date.now();
      const result = await this.pool.query(query, values);
      const executionTime = Date.now() - startTime;
      // xmax = 0 holds only for freshly inserted tuples, which lets us split
      // inserts from conflict-updates in a single round trip.
      const inserted = result.rows.filter((row: { is_insert: boolean }) => row.is_insert).length;
      const updated = result.rows.length - inserted;
      totalInserted += inserted;
      totalUpdated += updated;
      this.logger.debug(`Batch upsert chunk processed in ${executionTime}ms`, {
        chunkSize: chunk.length,
        inserted,
        updated,
        table: tableName,
      });
    } catch (error) {
      this.logger.error(`Batch upsert failed on chunk ${Math.floor(i / effectiveChunkSize) + 1}:`, {
        error,
        table: tableName,
        chunkStart: i,
        chunkSize: chunk.length,
      });
      throw error;
    }
  }
  this.logger.info('Batch upsert completed', {
    table: tableName,
    totalRecords: data.length,
    inserted: totalInserted,
    updated: totalUpdated,
  });
  return { insertedCount: totalInserted, updatedCount: totalUpdated };
}
/**
* Check if a table exists
*/

View file

@ -40,6 +40,8 @@
"docker:start": "./scripts/docker.sh start",
"docker:stop": "./scripts/docker.sh stop",
"docker:restart": "./scripts/docker.sh restart",
"db:setup-ib": "bun run database/postgres/scripts/setup-ib.ts",
"db:init": "bun run database/postgres/scripts/init.ts",
"docker:status": "./scripts/docker.sh status",
"docker:logs": "./scripts/docker.sh logs",
"docker:reset": "./scripts/docker.sh reset",
@ -86,8 +88,10 @@
"bun": ">=1.1.0"
},
"dependencies": {
"@types/pg": "^8.15.4",
"bullmq": "^5.53.2",
"ioredis": "^5.6.1",
"pg": "^8.16.0",
"playwright": "^1.53.0"
},
"trustedDependencies": [

86
proxy-playwrite.ts Normal file
View file

@ -0,0 +1,86 @@
#!/usr/bin/env bun
/**
* Quick test tool to open Playwright in non-headless mode with proxy
* For debugging and manual testing of the IB website
*/
import { Browser } from '@stock-bot/browser';
/**
 * Quick manual-test tool: opens a visible (non-headless) browser through a
 * proxy against the IB products/exchanges page, logs network traffic, and
 * stays open until Ctrl+C so the page can be driven by hand.
 *
 * The proxy URL is read from TEST_PROXY_URL; the checked-in value remains
 * only as a backward-compatible fallback.
 * NOTE(review): this credential was committed to source control — rotate it
 * and remove the fallback.
 */
async function testProxyBrowser() {
  try {
    console.log('🚀 Starting proxy browser test...');
    // Initialize browser in non-headless mode so it is visible for debugging.
    await Browser.initialize({
      headless: false, // Non-headless for debugging
      timeout: 30000, // 30 second timeout
      blockResources: false, // Allow all resources
    });
    console.log('✅ Browser initialized in non-headless mode');
    // Prefer the environment over the hard-coded credential.
    const proxyUrl =
      process.env.TEST_PROXY_URL ??
      'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80';
    // Create page routed through the proxy.
    const { page } = await Browser.createPageWithProxy(
      'https://www.interactivebrokers.com/en/trading/products-exchanges.php#/',
      proxyUrl
    );
    console.log('✅ Page created with proxy');
    // Log every request/response so proxy behavior is observable.
    page.onNetworkEvent(event => {
      if (event.type === 'request') {
        console.log('📤 Request:', {
          url: event.url,
          method: event.method,
          data: event.requestData,
          headers: Object.keys(event.headers || {}).length,
        });
      }
      if (event.type === 'response') {
        console.log('📥 Response:', {
          url: event.url,
          status: event.status,
        });
      }
    });
    console.log('⏳ Waiting for page load...');
    await page.waitForLoadState('domcontentloaded', { timeout: 30000 });
    console.log('✅ Page loaded - you can now interact with it manually');
    // Log current URL (may differ from the target after redirects).
    const currentUrl = page.url();
    console.log('🌐 Current URL:', currentUrl);
    // Keep the browser open for manual testing.
    console.log('🔍 Browser is open for manual testing...');
    console.log('💡 You can now:');
    console.log('   - Click around the page manually');
    console.log('   - Check if the proxy is working');
    console.log('   - Test the Products tab and radio buttons');
    console.log('   - Press Ctrl+C to close when done');
    // Block until the user interrupts with Ctrl+C.
    await new Promise(resolve => {
      process.on('SIGINT', () => {
        console.log('👋 Closing browser...');
        resolve(void 0);
      });
    });
  } catch (error) {
    console.error('❌ Proxy browser test failed:', { error });
  } finally {
    try {
      // await Browser.cleanup();
      console.log('🧹 Browser cleanup completed');
    } catch (cleanupError) {
      console.error('Failed to cleanup browser:', { error: cleanupError });
    }
  }
}
// Run the test only when executed directly.
if (import.meta.main) {
  testProxyBrowser().catch(console.error);
}
export { testProxyBrowser };

View file

@ -0,0 +1,325 @@
#!/bin/bash
# Stock Bot - Host Machine Dependencies Setup Script
# This script installs all necessary system dependencies for running the stock bot
# with browser automation capabilities
# Abort immediately when any command exits non-zero.
set -e # Exit on any error
# Colors for output
# ANSI escape sequences used by the log_* helpers below; NC resets styling.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Helper functions
# Print an informational message with a blue [INFO] tag.
log_info() {
    printf '%b\n' "${BLUE}[INFO]${NC} $1"
}
# Print a success message with a green [SUCCESS] tag.
log_success() {
    printf '%b\n' "${GREEN}[SUCCESS]${NC} $1"
}
# Print a warning message with a yellow [WARNING] tag.
log_warning() {
    printf '%b\n' "${YELLOW}[WARNING]${NC} $1"
}
# Print an error message with a red [ERROR] tag.
log_error() {
    printf '%b\n' "${RED}[ERROR]${NC} $1"
}
# Check if running as root
# Refuse to run as root: individual steps escalate with sudo where needed.
check_not_root() {
    if [ "$EUID" -ne 0 ]; then
        return
    fi
    log_error "Please do not run this script as root. It will use sudo when necessary."
    exit 1
}
# Detect OS
# Populate the OS and VERSION globals from /etc/os-release, or abort if the
# file is missing (only systemd-style Ubuntu/Debian hosts are supported).
detect_os() {
    if [[ ! -f /etc/os-release ]]; then
        log_error "Cannot detect OS. This script supports Ubuntu/Debian systems."
        exit 1
    fi
    # shellcheck disable=SC1091
    . /etc/os-release
    OS=$ID
    VERSION=$VERSION_ID
    log_info "Detected OS: $OS $VERSION"
}
# Update package lists
# Refresh apt's package indexes so subsequent installs see current versions.
update_packages() {
    log_info "Updating package lists..."
    sudo apt update
    log_success "Package lists updated"
}
# Install system dependencies for Playwright
# Install the OS-level shared libraries and fonts that Playwright's bundled
# browsers need. Kept as four separate apt invocations, matching the original
# grouping (basics, browser runtime, media/graphics, fonts).
install_playwright_deps() {
    log_info "Installing Playwright system dependencies..."

    # Basic tooling for fetching and verifying packages.
    local base_pkgs=(
        wget curl gnupg ca-certificates
        software-properties-common apt-transport-https
    )
    sudo apt install -y "${base_pkgs[@]}"

    # Core browser runtime libraries.
    local browser_pkgs=(
        libnss3 libnspr4 libatk-bridge2.0-0 libdrm2 libxkbcommon0
        libxcomposite1 libxdamage1 libxrandr2 libgbm1 libxss1
        libasound2 libatspi2.0-0 libgtk-3-0
    )
    sudo apt install -y "${browser_pkgs[@]}"

    # Additional media and graphics libraries.
    # NOTE(review): libgconf-2-4 was dropped from Ubuntu 20.04+; with `set -e`
    # this install aborts on newer releases — confirm the target distro.
    local media_pkgs=(
        libgconf-2-4 libxfixes3 libxinerama1 libxi6 libxrandr2
        libasound2-dev libpangocairo-1.0-0 libcairo-gobject2 libcairo2
        libgdk-pixbuf2.0-0 libgtk-3-0 libglib2.0-0 libpango-1.0-0
        libharfbuzz0b libfreetype6 libfontconfig1
    )
    sudo apt install -y "${media_pkgs[@]}"

    # Fonts for proper page rendering, including emoji and CJK text.
    local font_pkgs=(fonts-liberation fonts-noto-color-emoji fonts-noto-cjk)
    sudo apt install -y "${font_pkgs[@]}"

    log_success "Playwright system dependencies installed"
}
# Install development tools
# Install common development and inspection tools.
install_dev_tools() {
    log_info "Installing development tools..."
    local tools=(
        build-essential git curl wget unzip
        vim htop jq tree
    )
    sudo apt install -y "${tools[@]}"
    log_success "Development tools installed"
}
# Install Docker (if not already installed)
# Install Docker CE from Docker's official apt repository (no-op when docker
# is already on PATH), and add the invoking user to the docker group.
install_docker() {
    if command -v docker &> /dev/null; then
        log_info "Docker is already installed"
        return
    fi
    log_info "Installing Docker..."
    local keyring="/usr/share/keyrings/docker-archive-keyring.gpg"
    # Add Docker's official GPG key.
    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o "$keyring"
    # Register the Docker apt repository for this architecture and release.
    local repo="deb [arch=$(dpkg --print-architecture) signed-by=$keyring] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
    echo "$repo" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
    # Refresh indexes and install the engine plus compose plugin.
    sudo apt update
    sudo apt install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
    # Allow the current user to run docker without sudo (takes effect on re-login).
    sudo usermod -aG docker "$USER"
    log_success "Docker installed. Please log out and back in for group changes to take effect."
}
# Install Node.js and Bun (if not already installed)
# Ensure both JavaScript runtimes are available: Bun (primary) and Node.js
# (needed by some tooling). Each install is skipped when already present.
install_runtime() {
    if ! command -v bun &> /dev/null; then
        log_info "Installing Bun..."
        curl -fsSL https://bun.sh/install | bash
        # Make bun usable for the remainder of this script run.
        export PATH="$HOME/.bun/bin:$PATH"
        log_success "Bun installed"
    else
        log_info "Bun is already installed: $(bun --version)"
    fi
    if ! command -v node &> /dev/null; then
        log_info "Installing Node.js..."
        curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
        sudo apt install -y nodejs
        log_success "Node.js installed"
    else
        log_info "Node.js is already installed: $(node --version)"
    fi
}
# Install additional system dependencies for proxy and networking
# Install networking and proxy utilities used for debugging proxied traffic.
# NOTE(review): on Ubuntu 22.04+ `netcat` is a virtual package — confirm it
# resolves, or pin netcat-openbsd explicitly.
install_network_deps() {
    log_info "Installing networking and proxy dependencies..."
    local pkgs=(
        net-tools iputils-ping telnet netcat socat
        proxychains4 tor privoxy
    )
    sudo apt install -y "${pkgs[@]}"
    log_success "Networking dependencies installed"
}
# Install system monitoring tools
# Install process, disk, and network monitoring utilities.
install_monitoring_tools() {
    log_info "Installing system monitoring tools..."
    local tools=(
        htop iotop nethogs iftop
        dstat sysstat lsof
    )
    sudo apt install -y "${tools[@]}"
    log_success "Monitoring tools installed"
}
# Configure system limits for better performance
# Raise kernel and per-process file-descriptor limits for heavy browser use.
# Idempotent: each setting is appended only if not already present, so
# re-running the script no longer duplicates lines in /etc/sysctl.conf and
# /etc/security/limits.conf (the previous version appended unconditionally).
configure_system_limits() {
    log_info "Configuring system limits..."
    # Increase file descriptor limits (system-wide and per-user).
    if ! grep -qF "fs.file-max = 2097152" /etc/sysctl.conf; then
        echo "fs.file-max = 2097152" | sudo tee -a /etc/sysctl.conf
    fi
    if ! grep -qF "* soft nofile 65536" /etc/security/limits.conf; then
        echo "* soft nofile 65536" | sudo tee -a /etc/security/limits.conf
    fi
    if ! grep -qF "* hard nofile 65536" /etc/security/limits.conf; then
        echo "* hard nofile 65536" | sudo tee -a /etc/security/limits.conf
    fi
    # Apply sysctl changes immediately.
    sudo sysctl -p
    log_success "System limits configured"
}
# Install Playwright browsers
# Download the three Playwright browser engines, one at a time, from the
# project root (this script lives in scripts/, so step one level up).
install_playwright_browsers() {
    log_info "Installing Playwright browsers..."
    cd "$(dirname "$0")/.."
    local browser
    for browser in chromium firefox webkit; do
        bunx playwright install "$browser"
    done
    log_success "Playwright browsers installed"
}
# Main installation function
# Orchestrate the full host setup. Honors the --skip-docker and --minimal
# flags parsed below: previously SKIP_DOCKER/MINIMAL were set but never read,
# so both options were silently ignored.
main() {
    log_info "Starting Stock Bot host dependencies setup..."
    check_not_root
    detect_os
    # Only proceed if Ubuntu/Debian
    if [[ "$OS" != "ubuntu" && "$OS" != "debian" ]]; then
        log_error "This script only supports Ubuntu and Debian systems"
        exit 1
    fi
    # Essential steps always run.
    update_packages
    install_dev_tools
    install_playwright_deps
    install_runtime
    # Docker is skipped when --skip-docker was given.
    if [[ "$SKIP_DOCKER" != "true" ]]; then
        install_docker
    fi
    # Optional extras are skipped in --minimal mode.
    if [[ "$MINIMAL" != "true" ]]; then
        install_network_deps
        install_monitoring_tools
        configure_system_limits
    fi
    install_playwright_browsers
    log_success "Host dependencies setup completed!"
    log_info "Next steps:"
    echo "  1. Log out and back in (or run 'newgrp docker') to activate Docker group membership"
    echo "  2. Run 'source ~/.bashrc' to update PATH for Bun"
    echo "  3. Navigate to your project directory and run 'bun install'"
    echo "  4. Test the setup with 'bun run dev'"
}
# Show help
# Print usage information (single heredoc; $0 still expands to the script name).
show_help() {
    cat << EOF
Stock Bot Host Dependencies Setup Script

Usage: $0 [OPTIONS]

Options:
  -h, --help     Show this help message
  --skip-docker  Skip Docker installation
  --minimal      Install only essential dependencies

This script installs all necessary system dependencies for:
  - Playwright browser automation
  - Docker containers
  - Development tools
  - Network utilities
  - System monitoring tools
EOF
}
# Parse command line arguments
SKIP_DOCKER=false
MINIMAL=false
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
--skip-docker)
SKIP_DOCKER=true
shift
;;
--minimal)
MINIMAL=true
shift
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac
done
# Run main function
main

134
scripts/setup-playwright.sh Executable file
View file

@ -0,0 +1,134 @@
#!/bin/bash
# Playwright Setup Script for Stock Bot
# This script specifically handles Playwright installation and browser setup
# Abort on the first failing command.
set -e
# Colors
# ANSI escape codes used by the log_* helpers; NC resets terminal styling.
GREEN='\033[0;32m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'
# Print an informational message with a blue [INFO] tag.
log_info() {
    printf '%b\n' "${BLUE}[INFO]${NC} $1"
}
# Print a success message with a green [SUCCESS] tag.
log_success() {
    printf '%b\n' "${GREEN}[SUCCESS]${NC} $1"
}
# Print an error message with a red [ERROR] tag.
log_error() {
    printf '%b\n' "${RED}[ERROR]${NC} $1"
}
# Check if we're in the project directory
check_project_directory() {
if [[ ! -f "package.json" ]]; then
log_error "Please run this script from the project root directory"
exit 1
fi
}
# Install Playwright dependencies
install_playwright_deps() {
log_info "Installing Playwright system dependencies..."
bunx playwright install-deps chromium
log_success "Playwright system dependencies installed"
}
# Install Playwright browsers
install_browsers() {
log_info "Installing Playwright browsers..."
# Install all browsers
bunx playwright install chromium
bunx playwright install firefox
bunx playwright install webkit
log_success "All Playwright browsers installed"
}
# Test Playwright installation
test_playwright() {
log_info "Testing Playwright installation..."
# Create a simple test script
cat > /tmp/test-playwright.js << 'EOF'
const { chromium } = require('playwright');
(async () => {
try {
const browser = await chromium.launch({ headless: true });
const page = await browser.newPage();
await page.goto('https://example.com');
const title = await page.title();
console.log('✅ Playwright test successful! Page title:', title);
await browser.close();
} catch (error) {
console.error('❌ Playwright test failed:', error.message);
process.exit(1);
}
})();
EOF
# Run the test
node /tmp/test-playwright.js
# Clean up
rm /tmp/test-playwright.js
log_success "Playwright test completed successfully"
}
# Main function
main() {
log_info "Setting up Playwright for Stock Bot..."
check_project_directory
install_playwright_deps
install_browsers
test_playwright
log_success "Playwright setup completed!"
log_info "You can now run your browser automation scripts"
}
# Show help
show_help() {
echo "Playwright Setup Script for Stock Bot"
echo ""
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Options:"
echo " -h, --help Show this help message"
echo " --test-only Only run the Playwright test"
echo " --deps-only Only install system dependencies"
echo ""
}
# Parse arguments
case "${1:-}" in
-h|--help)
show_help
exit 0
;;
--test-only)
check_project_directory
test_playwright
exit 0
;;
--deps-only)
install_playwright_deps
exit 0
;;
"")
main
;;
*)
log_error "Unknown option: $1"
show_help
exit 1
;;
esac