This commit is contained in:
Boki 2026-02-23 09:38:24 -05:00
parent adc2450013
commit dcbeebac83
14 changed files with 859 additions and 368 deletions

View file

@ -1,4 +1,3 @@
using Microsoft.Playwright;
using Poe2Trade.Core;
using Poe2Trade.Game;
using Poe2Trade.Inventory;
@ -368,7 +367,7 @@ public class BotOrchestrator : IAsyncDisposable
PipelineService.Dispose();
}
private void OnNewListings(string searchId, List<string> itemIds, IPage page)
private void OnNewListings(string searchId, List<string> itemIds)
{
if (_paused)
{
@ -385,8 +384,7 @@ public class BotOrchestrator : IAsyncDisposable
ItemIds: itemIds,
WhisperText: "",
Timestamp: DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
TradeUrl: "",
Page: page
TradeUrl: ""
));
}

View file

@ -1,5 +1,3 @@
using System.Text.Json;
using Microsoft.Playwright;
using Poe2Trade.Core;
using Poe2Trade.Game;
using Poe2Trade.Inventory;
@ -13,7 +11,7 @@ public class ScrapExecutor
{
private ScrapState _state = ScrapState.Idle;
private bool _stopped;
private IPage? _activePage;
private string? _activeScrapId;
private PostAction _postAction = PostAction.Salvage;
private readonly IGameController _game;
private readonly IScreenReader _screen;
@ -46,10 +44,10 @@ public class ScrapExecutor
public async Task Stop()
{
_stopped = true;
if (_activePage != null)
if (_activeScrapId != null)
{
try { await _activePage.CloseAsync(); } catch { }
_activePage = null;
try { await _tradeMonitor.CloseScrapPage(_activeScrapId); } catch { }
_activeScrapId = null;
}
SetState(ScrapState.Idle);
Log.Information("Scrap executor stopped");
@ -63,8 +61,8 @@ public class ScrapExecutor
await _inventory.ScanInventory(_postAction);
var (page, items) = await _tradeMonitor.OpenScrapPage(tradeUrl);
_activePage = page;
var (scrapId, items) = await _tradeMonitor.OpenScrapPage(tradeUrl);
_activeScrapId = scrapId;
Log.Information("Trade page opened: {Count} items", items.Count);
while (!_stopped)
@ -94,7 +92,7 @@ public class ScrapExecutor
continue;
}
var success = await BuyItem(page, item);
var success = await BuyItem(item);
if (!success) Log.Warning("Failed to buy item {Id}", item.Id);
await Helpers.RandomDelay(500, 1000);
@ -103,7 +101,7 @@ public class ScrapExecutor
if (_stopped) break;
Log.Information("Page exhausted, refreshing...");
items = await RefreshPage(page);
items = await _tradeMonitor.ReloadScrapPage(_activeScrapId!);
Log.Information("Page refreshed: {Count} items", items.Count);
if (items.Count == 0)
@ -111,20 +109,20 @@ public class ScrapExecutor
Log.Information("No items after refresh, waiting...");
await Helpers.Sleep(Delays.EmptyRefreshWait);
if (_stopped) break;
items = await RefreshPage(page);
items = await _tradeMonitor.ReloadScrapPage(_activeScrapId!);
}
}
_activePage = null;
_activeScrapId = null;
SetState(ScrapState.Idle);
Log.Information("Scrap loop ended");
}
private async Task<bool> BuyItem(IPage page, TradeItem item)
private async Task<bool> BuyItem(TradeItem item)
{
try
{
if (!await TravelToSellerIfNeeded(page, item))
if (!await TravelToSellerIfNeeded(item))
return false;
SetState(ScrapState.Buying);
@ -150,7 +148,7 @@ public class ScrapExecutor
}
}
private async Task<bool> TravelToSellerIfNeeded(IPage page, TradeItem item)
private async Task<bool> TravelToSellerIfNeeded(TradeItem item)
{
var alreadyAtSeller = !_inventory.IsAtOwnHideout
&& !string.IsNullOrEmpty(item.Account)
@ -167,7 +165,7 @@ public class ScrapExecutor
_config.TravelTimeoutMs,
async () =>
{
if (!await _tradeMonitor.ClickTravelToHideout(page, item.Id))
if (!await _tradeMonitor.ClickTravelToHideout(_activeScrapId!, item.Id))
throw new Exception("Failed to click Travel to Hideout");
});
if (!arrived)
@ -197,35 +195,4 @@ public class ScrapExecutor
SetState(ScrapState.Failed);
}
}
private async Task<List<TradeItem>> RefreshPage(IPage page)
{
var items = new List<TradeItem>();
void OnResponse(object? _, IResponse response)
{
if (!response.Url.Contains("/api/trade2/fetch/")) return;
try
{
var body = response.TextAsync().GetAwaiter().GetResult();
using var doc = JsonDocument.Parse(body);
if (doc.RootElement.TryGetProperty("result", out var results) &&
results.ValueKind == JsonValueKind.Array)
{
foreach (var r in results.EnumerateArray())
items.Add(TradeMonitor.ParseTradeItem(r));
}
}
catch (Exception ex)
{
Log.Debug(ex, "Non-JSON trade response");
}
}
page.Response += OnResponse;
await page.ReloadAsync(new PageReloadOptions { WaitUntil = WaitUntilState.NetworkIdle });
await Helpers.Sleep(Delays.PageLoad);
page.Response -= OnResponse;
return items;
}
}

View file

@ -1,4 +1,3 @@
using Microsoft.Playwright;
using Poe2Trade.Core;
using Poe2Trade.Game;
using Poe2Trade.Inventory;
@ -39,16 +38,9 @@ public class TradeExecutor
public async Task<bool> ExecuteTrade(TradeInfo trade)
{
var page = trade.Page as IPage;
if (page == null)
{
Log.Error("Trade has no page reference");
return false;
}
try
{
if (!await TravelToSeller(page, trade))
if (!await TravelToSeller(trade))
return false;
if (!await FindSellerStash())
@ -81,7 +73,7 @@ public class TradeExecutor
}
}
private async Task<bool> TravelToSeller(IPage page, TradeInfo trade)
private async Task<bool> TravelToSeller(TradeInfo trade)
{
SetState(TradeState.Traveling);
Log.Information("Clicking Travel to Hideout for {SearchId}...", trade.SearchId);
@ -90,7 +82,7 @@ public class TradeExecutor
_config.TravelTimeoutMs,
async () =>
{
if (!await _tradeMonitor.ClickTravelToHideout(page, trade.ItemIds[0]))
if (!await _tradeMonitor.ClickTravelToHideout(trade.SearchId, trade.ItemIds[0]))
throw new Exception("Failed to click Travel to Hideout");
});
if (!arrived)

View file

@ -7,8 +7,7 @@ public record TradeInfo(
List<string> ItemIds,
string WhisperText,
long Timestamp,
string TradeUrl,
object? Page // Playwright Page reference
string TradeUrl
);
public record TradeItem(

View file

@ -1,15 +1,16 @@
using Microsoft.Playwright;
using Poe2Trade.Core;
namespace Poe2Trade.Trade;
public interface ITradeMonitor : IAsyncDisposable
{
event Action<string, List<string>, IPage>? NewListings;
event Action<string, List<string>>? NewListings;
Task Start(string? dashboardUrl = null);
Task AddSearch(string tradeUrl);
Task PauseSearch(string searchId);
Task<bool> ClickTravelToHideout(IPage page, string? itemId = null);
Task<(IPage Page, List<TradeItem> Items)> OpenScrapPage(string tradeUrl);
Task<bool> ClickTravelToHideout(string pageId, string? itemId = null);
Task<(string ScrapId, List<TradeItem> Items)> OpenScrapPage(string tradeUrl);
Task<List<TradeItem>> ReloadScrapPage(string scrapId);
Task CloseScrapPage(string scrapId);
string ExtractSearchId(string url);
}

View file

@ -5,7 +5,6 @@
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Playwright" Version="1.49.0" />
<ProjectReference Include="..\Poe2Trade.Core\Poe2Trade.Core.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,337 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Serialization;
using Poe2Trade.Core;
using Serilog;
namespace Poe2Trade.Trade;
/// <summary>
/// ITradeMonitor implementation that delegates all browser automation to an external
/// Node.js daemon (tools/trade-daemon/daemon.mjs), speaking a newline-delimited JSON
/// protocol over the child's stdin/stdout. Responses are correlated to requests by
/// reqId; unsolicited "event" messages (new listings, websocket closes) arrive on the
/// same pipe and are dispatched by <see cref="HandleEvent"/>.
/// </summary>
public class TradeDaemonBridge : ITradeMonitor
{
// Serialization must mirror what daemon.mjs parses: camelCase keys, nulls omitted.
private static readonly JsonSerializerOptions JsonOpts = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
private Process? _proc; // daemon child process; null until first command spawns it
private int _reqCounter; // monotonic request id (incremented via Interlocked)
// reqId -> completion source; resolved or faulted by ReadLoop when the matching response arrives.
private readonly ConcurrentDictionary<int, TaskCompletionSource<JsonElement>> _pending = new();
private readonly SavedSettings _config;
private readonly string _daemonScript; // absolute path to daemon.mjs
private readonly string _nodeExe; // resolved via PATH — assumes node is installed; TODO confirm
/// <summary>Raised when the daemon reports new live-search listings: (searchId, itemIds).</summary>
public event Action<string, List<string>>? NewListings;
public TradeDaemonBridge(SavedSettings config)
{
_config = config;
_daemonScript = Path.GetFullPath(Path.Combine("tools", "trade-daemon", "daemon.mjs"));
_nodeExe = "node";
}
/// <summary>Spawns the daemon if needed and launches its browser (optionally opening a dashboard tab).</summary>
public async Task Start(string? dashboardUrl = null)
{
EnsureDaemonRunning();
var userDataDir = Path.GetFullPath(_config.BrowserUserDataDir);
await SendCommand("start", new
{
browserUserDataDir = userDataDir,
headless = _config.Headless,
dashboardUrl,
});
Log.Information("Trade daemon browser started");
}
/// <summary>Opens a live-search tab in the daemon's browser for the given trade URL.</summary>
public async Task AddSearch(string tradeUrl)
{
EnsureDaemonRunning();
await SendCommand("addSearch", new { url = tradeUrl });
}
/// <summary>Pauses the live search identified by <paramref name="searchId"/> (the daemon closes its tab).</summary>
public async Task PauseSearch(string searchId)
{
EnsureDaemonRunning();
await SendCommand("pauseSearch", new { searchId });
}
/// <summary>
/// Asks the daemon to click "Travel to Hideout" on the page identified by
/// <paramref name="pageId"/> (a searchId or scrapId), optionally scoped to one listing.
/// Returns whether the daemon reports the click landed.
/// </summary>
public async Task<bool> ClickTravelToHideout(string pageId, string? itemId = null)
{
EnsureDaemonRunning();
var resp = await SendCommand("clickTravel", new { pageId, itemId });
return resp.TryGetProperty("clicked", out var c) && c.GetBoolean();
}
/// <summary>Opens a scrap (bulk-buy) page; returns the daemon-assigned scrap id plus the initially captured items.</summary>
public async Task<(string ScrapId, List<TradeItem> Items)> OpenScrapPage(string tradeUrl)
{
EnsureDaemonRunning();
var resp = await SendCommand("openScrapPage", new { url = tradeUrl });
var scrapId = resp.GetProperty("scrapId").GetString()!;
var items = ParseItems(resp);
return (scrapId, items);
}
/// <summary>Reloads an open scrap page and returns the freshly captured items.</summary>
public async Task<List<TradeItem>> ReloadScrapPage(string scrapId)
{
EnsureDaemonRunning();
var resp = await SendCommand("reloadScrapPage", new { scrapId });
return ParseItems(resp);
}
/// <summary>Closes the scrap page identified by <paramref name="scrapId"/>.</summary>
public async Task CloseScrapPage(string scrapId)
{
EnsureDaemonRunning();
await SendCommand("closeScrapPage", new { scrapId });
}
/// <summary>Extracts the trailing path segment of a trade URL as the search id, ignoring a "/live" suffix.</summary>
public string ExtractSearchId(string url)
{
var cleaned = System.Text.RegularExpressions.Regex.Replace(url, @"/live/?$", "");
var parts = cleaned.Split('/');
return parts.Length > 0 ? parts[^1] : url;
}
/// <summary>
/// Best-effort shutdown: sends "stop", waits up to 5s, then kills the process.
/// All still-pending requests are cancelled.
/// </summary>
public async ValueTask DisposeAsync()
{
if (_proc != null && !_proc.HasExited)
{
try
{
// Send stop command (best effort)
var reqId = Interlocked.Increment(ref _reqCounter);
var msg = JsonSerializer.Serialize(new { reqId, cmd = "stop" }, JsonOpts);
await _proc.StandardInput.WriteLineAsync(msg);
await _proc.StandardInput.FlushAsync();
_proc.WaitForExit(5000); // NOTE(review): synchronous wait inside async dispose — acceptable at shutdown
}
catch { /* ignore */ }
if (_proc != null && !_proc.HasExited)
{
try { _proc.Kill(); } catch { /* ignore */ }
}
}
_proc?.Dispose();
_proc = null;
// Complete any pending requests
foreach (var kv in _pending)
{
kv.Value.TrySetCanceled();
_pending.TryRemove(kv.Key, out _);
}
Log.Information("Trade daemon stopped");
}
/// <summary>
/// Writes one JSON command line to the daemon and awaits the matching response
/// (correlated by reqId). Cancels after 60 seconds.
/// </summary>
private async Task<JsonElement> SendCommand(string cmd, object? parameters = null)
{
if (_proc == null || _proc.HasExited)
throw new InvalidOperationException("Trade daemon is not running");
var reqId = Interlocked.Increment(ref _reqCounter);
var tcs = new TaskCompletionSource<JsonElement>(TaskCreationOptions.RunContinuationsAsynchronously);
_pending[reqId] = tcs;
// Build command object: merge reqId + cmd + params
var dict = new Dictionary<string, object?> { ["reqId"] = reqId, ["cmd"] = cmd };
if (parameters != null)
{
// Flatten the anonymous parameter object's properties into the top-level payload.
var paramJson = JsonSerializer.SerializeToElement(parameters, JsonOpts);
foreach (var prop in paramJson.EnumerateObject())
dict[prop.Name] = prop.Value;
}
var json = JsonSerializer.Serialize(dict, JsonOpts);
await _proc.StandardInput.WriteLineAsync(json);
await _proc.StandardInput.FlushAsync();
// Await response with timeout
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(60));
cts.Token.Register(() => tcs.TrySetCanceled());
try
{
return await tcs.Task;
}
finally
{
_pending.TryRemove(reqId, out _);
}
}
/// <summary>
/// Spawns the node daemon if it is not already running and blocks (up to 15s)
/// for its "ready" handshake line on stdout.
/// NOTE(review): not thread-safe — two concurrent first calls could spawn two
/// daemons; confirm callers are serialized.
/// </summary>
private void EnsureDaemonRunning()
{
if (_proc != null && !_proc.HasExited)
return;
_proc?.Dispose();
_proc = null;
if (!File.Exists(_daemonScript))
throw new FileNotFoundException($"Trade daemon not found at {_daemonScript}");
Log.Information("Spawning trade daemon: {Node} {Script}", _nodeExe, _daemonScript);
var proc = new Process
{
StartInfo = new ProcessStartInfo
{
FileName = _nodeExe,
Arguments = $"\"{_daemonScript}\"",
UseShellExecute = false,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
CreateNoWindow = true,
}
};
// Daemon logs go to stderr so they never corrupt the JSON protocol on stdout.
proc.ErrorDataReceived += (_, e) =>
{
if (!string.IsNullOrEmpty(e.Data))
Log.Debug("[trade-daemon] {Line}", e.Data);
};
try
{
proc.Start();
proc.BeginErrorReadLine();
// Wait for ready signal (up to 15s)
var readyTask = Task.Run(() => proc.StandardOutput.ReadLine());
if (!readyTask.Wait(TimeSpan.FromSeconds(15)))
throw new TimeoutException("Trade daemon did not send ready signal within 15s");
var readyLine = readyTask.Result
?? throw new Exception("Trade daemon exited before ready signal");
var readyDoc = JsonDocument.Parse(readyLine);
if (!readyDoc.RootElement.TryGetProperty("type", out var typeProp) ||
typeProp.GetString() != "ready")
throw new Exception($"Trade daemon did not send ready signal: {readyLine}");
}
catch
{
// Clean up the half-started process before propagating the failure.
try { if (!proc.HasExited) proc.Kill(); } catch { /* best effort */ }
proc.Dispose();
throw;
}
_proc = proc;
// Start background reader thread
_ = Task.Run(() => ReadLoop(proc));
Log.Information("Trade daemon ready");
}
/// <summary>
/// Blocking stdout reader: routes "response" lines to their pending request and
/// "event" lines to <see cref="HandleEvent"/>. When the daemon exits, fails every
/// request still pending.
/// </summary>
private void ReadLoop(Process proc)
{
try
{
while (!proc.HasExited)
{
var line = proc.StandardOutput.ReadLine();
if (line == null) break; // stdout closed — daemon is gone
try
{
using var doc = JsonDocument.Parse(line);
var root = doc.RootElement;
var type = root.GetProperty("type").GetString();
if (type == "response")
{
var reqId = root.GetProperty("reqId").GetInt32();
if (_pending.TryGetValue(reqId, out var tcs))
{
var ok = root.GetProperty("ok").GetBoolean();
if (ok)
tcs.TrySetResult(root.Clone()); // Clone: doc is disposed at the end of this iteration
else
{
var error = root.TryGetProperty("error", out var e)
? e.GetString() ?? "Unknown error"
: "Unknown error";
tcs.TrySetException(new Exception($"Trade daemon error: {error}"));
}
}
}
else if (type == "event")
{
HandleEvent(root);
}
}
catch (Exception ex)
{
Log.Debug("Failed to parse daemon output: {Line} - {Error}", line, ex.Message);
}
}
}
catch (Exception ex)
{
Log.Warning(ex, "Trade daemon read loop ended");
}
// Daemon exited — fail all pending requests
foreach (var kv in _pending)
{
kv.Value.TrySetException(new Exception("Trade daemon process exited"));
_pending.TryRemove(kv.Key, out _);
}
}
/// <summary>Translates an unsolicited daemon event line into the corresponding C# event or log entry.</summary>
private void HandleEvent(JsonElement root)
{
var eventName = root.GetProperty("event").GetString();
switch (eventName)
{
case "newListings":
var searchId = root.GetProperty("searchId").GetString()!;
var itemIds = root.GetProperty("itemIds").EnumerateArray()
.Select(e => e.GetString()!)
.Where(s => s != null)
.ToList();
if (itemIds.Count > 0)
{
Log.Information("New listings from daemon: {SearchId} ({Count} items)", searchId, itemIds.Count);
NewListings?.Invoke(searchId, itemIds);
}
break;
case "wsClose":
var closedId = root.GetProperty("searchId").GetString()!;
Log.Warning("WebSocket closed (daemon): {SearchId}", closedId);
break;
default:
Log.Debug("Unknown daemon event: {Event}", eventName);
break;
}
}
/// <summary>Maps the daemon's "items" array (if present) into TradeItem records; absent fields get defaults.</summary>
private static List<TradeItem> ParseItems(JsonElement resp)
{
var items = new List<TradeItem>();
if (resp.TryGetProperty("items", out var arr) && arr.ValueKind == JsonValueKind.Array)
{
foreach (var el in arr.EnumerateArray())
{
items.Add(new TradeItem(
el.GetProperty("id").GetString() ?? "",
el.TryGetProperty("w", out var w) ? w.GetInt32() : 1,
el.TryGetProperty("h", out var h) ? h.GetInt32() : 1,
el.TryGetProperty("stashX", out var sx) ? sx.GetInt32() : 0,
el.TryGetProperty("stashY", out var sy) ? sy.GetInt32() : 0,
el.TryGetProperty("account", out var acc) ? acc.GetString() ?? "" : ""
));
}
}
return items;
}
}

View file

@ -1,296 +0,0 @@
using System.Text.Json;
using Microsoft.Playwright;
using Poe2Trade.Core;
using Serilog;
namespace Poe2Trade.Trade;
/// <summary>
/// Legacy in-process Playwright implementation of ITradeMonitor: launches a persistent
/// Chromium context with anti-automation ("stealth") tweaks, keeps one tab per live
/// search, and raises <see cref="NewListings"/> from trade-site WebSocket frames.
/// </summary>
public class TradeMonitor : ITradeMonitor
{
private IBrowserContext? _context; // persistent Chromium context; null until Start
private readonly Dictionary<string, IPage> _pages = new(); // searchId -> live-search tab
private readonly HashSet<string> _pausedSearches = new(); // searches whose WS frames are ignored
private readonly SavedSettings _config;
// Injected into every page before site scripts run, to mask common automation fingerprints.
private const string StealthScript = """
Object.defineProperty(navigator, 'webdriver', { get: () => undefined });
Object.defineProperty(navigator, 'plugins', {
get: () => [
{ name: 'Chrome PDF Plugin', filename: 'internal-pdf-viewer' },
{ name: 'Chrome PDF Viewer', filename: 'mhjfbmdgcfjbbpaeojofohoefgiehjai' },
{ name: 'Native Client', filename: 'internal-nacl-plugin' },
],
});
Object.defineProperty(navigator, 'languages', { get: () => ['en-US', 'en'] });
delete window.__playwright;
delete window.__pw_manual;
if (!window.chrome) window.chrome = {};
if (!window.chrome.runtime) window.chrome.runtime = { id: undefined };
const originalQuery = window.navigator.permissions?.query;
if (originalQuery) {
window.navigator.permissions.query = (params) => {
if (params.name === 'notifications')
return Promise.resolve({ state: Notification.permission });
return originalQuery(params);
};
}
""";
/// <summary>Raised on new live-search listings: (searchId, itemIds, page that produced them).</summary>
public event Action<string, List<string>, IPage>? NewListings;
public TradeMonitor(SavedSettings config)
{
_config = config;
}
/// <summary>Launches the stealth browser context and optionally opens a dashboard tab.</summary>
public async Task Start(string? dashboardUrl = null)
{
Log.Information("Launching Playwright browser (stealth mode)...");
// NOTE(review): this IPlaywright instance is local and never disposed; only the
// context is closed in DisposeAsync — confirm the driver process is reaped elsewhere.
var playwright = await Playwright.CreateAsync();
_context = await playwright.Chromium.LaunchPersistentContextAsync(
_config.BrowserUserDataDir,
new BrowserTypeLaunchPersistentContextOptions
{
Headless = _config.Headless,
ViewportSize = null,
Args = [
"--disable-blink-features=AutomationControlled",
"--disable-features=AutomationControlled",
"--no-first-run",
"--no-default-browser-check",
"--disable-infobars",
],
IgnoreDefaultArgs = ["--enable-automation"],
});
await _context.AddInitScriptAsync(StealthScript);
if (dashboardUrl != null)
{
// Reuse the initial blank tab when the persistent context created one.
var pages = _context.Pages;
if (pages.Count > 0)
await pages[0].GotoAsync(dashboardUrl);
else
await (await _context.NewPageAsync()).GotoAsync(dashboardUrl);
Log.Information("Dashboard opened: {Url}", dashboardUrl);
}
Log.Information("Browser launched (stealth active)");
}
/// <summary>
/// Opens a new tab on the trade search URL, subscribes to its WebSockets, and tries
/// to activate the site's live search. Duplicate search ids are ignored.
/// </summary>
public async Task AddSearch(string tradeUrl)
{
if (_context == null) throw new InvalidOperationException("Browser not started");
var searchId = ExtractSearchId(tradeUrl);
if (_pages.ContainsKey(searchId))
{
Log.Information("Search already open: {SearchId}", searchId);
return;
}
Log.Information("Adding trade search: {Url} ({SearchId})", tradeUrl, searchId);
var page = await _context.NewPageAsync();
_pages[searchId] = page;
await page.GotoAsync(tradeUrl, new PageGotoOptions { WaitUntil = WaitUntilState.NetworkIdle });
await Helpers.Sleep(Delays.PageLoad);
page.WebSocket += (_, ws) => HandleWebSocket(ws, searchId, page);
try
{
var liveBtn = page.Locator(Selectors.LiveSearchButton).First;
await liveBtn.ClickAsync(new LocatorClickOptions { Timeout = 5000 });
Log.Information("Live search activated: {SearchId}", searchId);
}
catch
{
// Best effort — the search may already be live or the button selector may not match.
Log.Warning("Could not click Activate Live Search: {SearchId}", searchId);
}
}
/// <summary>Marks a search paused (frames dropped) and closes its tab if open.</summary>
public async Task PauseSearch(string searchId)
{
_pausedSearches.Add(searchId);
if (_pages.TryGetValue(searchId, out var page))
{
await page.CloseAsync();
_pages.Remove(searchId);
}
Log.Information("Search paused: {SearchId}", searchId);
}
/// <summary>
/// Clicks "Travel to Hideout" on the given page — scoped to a specific listing row
/// when <paramref name="itemId"/> is supplied, otherwise the first matching button —
/// then dismisses any confirmation dialog. Returns false on any failure.
/// </summary>
public async Task<bool> ClickTravelToHideout(IPage page, string? itemId = null)
{
try
{
if (itemId != null)
{
var row = page.Locator(Selectors.ListingById(itemId));
if (await WaitForVisible(row, 5000))
{
var travelBtn = row.Locator(Selectors.TravelToHideoutButton).First;
if (await WaitForVisible(travelBtn, 3000))
{
await travelBtn.ClickAsync();
Log.Information("Clicked Travel to Hideout for item {ItemId}", itemId);
await HandleConfirmDialog(page);
return true;
}
}
}
// Fall back to the first travel button anywhere on the page.
var btn = page.Locator(Selectors.TravelToHideoutButton).First;
await btn.ClickAsync(new LocatorClickOptions { Timeout = 5000 });
Log.Information("Clicked Travel to Hideout");
await HandleConfirmDialog(page);
return true;
}
catch (Exception ex)
{
Log.Error(ex, "Failed to click Travel to Hideout");
return false;
}
}
/// <summary>
/// Opens a dedicated tab for bulk-buy ("scrap") flow and captures items from
/// /api/trade2/fetch/ responses observed during the initial page load.
/// </summary>
public async Task<(IPage Page, List<TradeItem> Items)> OpenScrapPage(string tradeUrl)
{
if (_context == null) throw new InvalidOperationException("Browser not started");
var page = await _context.NewPageAsync();
var items = new List<TradeItem>();
page.Response += async (_, response) =>
{
if (!response.Url.Contains("/api/trade2/fetch/")) return;
try
{
var body = await response.TextAsync();
using var doc = JsonDocument.Parse(body);
if (doc.RootElement.TryGetProperty("result", out var results) &&
results.ValueKind == JsonValueKind.Array)
{
foreach (var r in results.EnumerateArray())
items.Add(ParseTradeItem(r));
}
}
catch (Exception ex) { Log.Debug(ex, "Non-JSON trade response"); }
};
await page.GotoAsync(tradeUrl, new PageGotoOptions { WaitUntil = WaitUntilState.NetworkIdle });
await Helpers.Sleep(Delays.PageLoad);
Log.Information("Scrap page opened: {Url} ({Count} items)", tradeUrl, items.Count);
return (page, items);
}
/// <summary>Extracts the trailing path segment of a trade URL as the search id, ignoring a "/live" suffix.</summary>
public string ExtractSearchId(string url)
{
var cleaned = System.Text.RegularExpressions.Regex.Replace(url, @"/live/?$", "");
var parts = cleaned.Split('/');
return parts.Length > 0 ? parts[^1] : url;
}
/// <summary>
/// Flattens one element of the trade API's "result" array into a TradeItem.
/// Missing fields default to 1x1 size, stash (0,0), and an empty account name.
/// </summary>
public static TradeItem ParseTradeItem(JsonElement r)
{
var id = r.GetProperty("id").GetString() ?? "";
int w = 1, h = 1, stashX = 0, stashY = 0;
var account = "";
if (r.TryGetProperty("item", out var item))
{
if (item.TryGetProperty("w", out var wProp)) w = wProp.GetInt32();
if (item.TryGetProperty("h", out var hProp)) h = hProp.GetInt32();
}
if (r.TryGetProperty("listing", out var listing))
{
if (listing.TryGetProperty("stash", out var stash))
{
if (stash.TryGetProperty("x", out var sx)) stashX = sx.GetInt32();
if (stash.TryGetProperty("y", out var sy)) stashY = sy.GetInt32();
}
if (listing.TryGetProperty("account", out var acc) &&
acc.TryGetProperty("name", out var accName))
account = accName.GetString() ?? "";
}
return new TradeItem(id, w, h, stashX, stashY, account);
}
/// <summary>Closes every open tab and the browser context.</summary>
public async ValueTask DisposeAsync()
{
foreach (var page in _pages.Values)
await page.CloseAsync();
_pages.Clear();
if (_context != null)
{
await _context.CloseAsync();
_context = null;
}
Log.Information("Trade monitor stopped");
}
/// <summary>
/// Subscribes to a live-search WebSocket (filtered by URL) and raises
/// <see cref="NewListings"/> for each "new" item-id array, unless the search is paused.
/// </summary>
private void HandleWebSocket(IWebSocket ws, string searchId, IPage page)
{
if (!ws.Url.Contains("/api/trade") || !ws.Url.Contains("/live/"))
return;
Log.Information("WebSocket connected for live search: {SearchId}", searchId);
ws.FrameReceived += (_, frame) =>
{
if (_pausedSearches.Contains(searchId)) return;
try
{
var payload = frame.Text ?? "";
using var doc = JsonDocument.Parse(payload);
if (doc.RootElement.TryGetProperty("new", out var newItems) &&
newItems.ValueKind == JsonValueKind.Array)
{
var ids = newItems.EnumerateArray()
.Select(e => e.GetString()!)
.Where(s => s != null)
.ToList();
if (ids.Count > 0)
{
Log.Information("New listings: {SearchId} ({Count} items)", searchId, ids.Count);
NewListings?.Invoke(searchId, ids, page);
}
}
}
catch (Exception ex) { Log.Debug(ex, "Non-JSON WebSocket frame"); }
};
ws.Close += (_, _) => Log.Warning("WebSocket closed: {SearchId}", searchId);
}
/// <summary>Waits briefly for a confirmation dialog and clicks its accept button if one appears.</summary>
private async Task HandleConfirmDialog(IPage page)
{
await Helpers.Sleep(500);
try
{
var confirmBtn = page.Locator(Selectors.ConfirmYesButton).First;
if (await WaitForVisible(confirmBtn, 2000))
{
await confirmBtn.ClickAsync();
Log.Information("Confirmed dialog");
}
}
catch { /* No dialog */ }
}
/// <summary>Returns true if the locator becomes visible within the timeout, false on timeout.</summary>
private static async Task<bool> WaitForVisible(ILocator locator, int timeoutMs)
{
try
{
await locator.WaitForAsync(new LocatorWaitForOptions
{
State = WaitForSelectorState.Visible,
Timeout = timeoutMs
});
return true;
}
catch (TimeoutException) { return false; }
}
}

View file

@ -43,7 +43,7 @@ public partial class App : Application
services.AddSingleton<IScreenReader, ScreenReader>();
services.AddSingleton<IClientLogWatcher>(sp =>
new ClientLogWatcher(sp.GetRequiredService<SavedSettings>().Poe2LogPath));
services.AddSingleton<ITradeMonitor, TradeMonitor>();
services.AddSingleton<ITradeMonitor, TradeDaemonBridge>();
services.AddSingleton<IInventoryManager, InventoryManager>();
// Bot

View file

@ -213,7 +213,8 @@ public partial class SettingsViewModel : ObservableObject
partial void OnStashScanTimeoutMsChanged(decimal? value) => IsSaved = false;
partial void OnWaitForMoreItemsMsChanged(decimal? value) => IsSaved = false;
partial void OnBetweenTradesDelayMsChanged(decimal? value) => IsSaved = false;
partial void OnHeadlessChanged(bool value) => IsSaved = false;
partial void OnHeadlessChanged(bool value) =>
_bot.Store.UpdateSettings(s => s.Headless = value);
partial void OnShowHudDebugChanged(bool value) => IsSaved = false;
partial void OnOcrEngineChanged(string value) => IsSaved = false;
}

View file

@ -184,7 +184,7 @@
<DockPanel>
<Button DockPanel.Dock="Right" Content="X" FontSize="10"
VerticalAlignment="Center"
Command="{Binding #LinksControl.((vm:MainWindowViewModel)DataContext).RemoveLinkCommand}"
Command="{ReflectionBinding #LinksControl.DataContext.RemoveLinkCommand}"
CommandParameter="{Binding Id}" />
<CheckBox DockPanel.Dock="Left"
IsChecked="{Binding Active}"

View file

@ -0,0 +1,425 @@
import { chromium } from "playwright";
import { createInterface } from "readline";
// All logging goes to stderr — never corrupt the JSON protocol on stdout
const log = (...args) => process.stderr.write(`[trade-daemon] ${args.join(" ")}\n`);
// --- Protocol helpers ---
// Write one newline-delimited JSON message to stdout (the host reads line-by-line).
function sendJson(obj) {
process.stdout.write(JSON.stringify(obj) + "\n");
}
// Success reply for a specific request; extra payload fields are merged into the message.
function sendResponse(reqId, extras = {}) {
sendJson({ type: "response", reqId, ok: true, ...extras });
}
// Failure reply; error is stringified so Error objects serialize usefully.
function sendError(reqId, error) {
sendJson({ type: "response", reqId, ok: false, error: String(error) });
}
// Unsolicited event not tied to any request (e.g. newListings, wsClose).
function sendEvent(event, data = {}) {
sendJson({ type: "event", event, ...data });
}
// --- Stealth script (same as the working TS version) ---
// Injected into every page before site scripts run, to mask automation fingerprints.
const STEALTH_SCRIPT = `
Object.defineProperty(navigator, 'webdriver', { get: () => undefined });
Object.defineProperty(navigator, 'plugins', {
get: () => [
{ name: 'Chrome PDF Plugin', filename: 'internal-pdf-viewer' },
{ name: 'Chrome PDF Viewer', filename: 'mhjfbmdgcfjbbpaeojofohoefgiehjai' },
{ name: 'Native Client', filename: 'internal-nacl-plugin' },
],
});
Object.defineProperty(navigator, 'languages', { get: () => ['en-US', 'en'] });
delete window.__playwright;
delete window.__pw_manual;
if (!window.chrome) window.chrome = {};
if (!window.chrome.runtime) window.chrome.runtime = { id: undefined };
const originalQuery = window.navigator.permissions?.query;
if (originalQuery) {
window.navigator.permissions.query = (params) => {
if (params.name === 'notifications')
return Promise.resolve({ state: Notification.permission });
return originalQuery(params);
};
}
`;
// --- Selectors (mirrored from Selectors.cs) ---
// Keep these in sync with the C# Selectors class — both sides target the same markup.
const Selectors = {
LiveSearchButton: 'button.livesearch-btn, button:has-text("Activate Live Search")',
ListingRow: '.resultset .row, [class*="result"]',
ListingById: (id) => `[data-id="${id}"]`,
TravelToHideoutButton:
'button:has-text("Travel to Hideout"), button:has-text("Visit Hideout"), a:has-text("Travel to Hideout"), [class*="hideout"]',
ConfirmYesButton:
'button:has-text("Yes"), button:has-text("Confirm"), button:has-text("OK"), button:has-text("Accept")',
};
// --- State ---
// Single browser context shared by all commands; null until cmdStart runs.
let context = null;
const searchPages = new Map(); // searchId → page
const pausedSearches = new Set();
const scrapPages = new Map(); // scrapId → { page, items }
let scrapIdCounter = 0;
// --- Helpers ---
// Extract the trailing path segment of a trade URL as the search id,
// ignoring an optional "/live" (or "/live/") suffix. Falls back to the
// whole URL when the last segment is empty.
function extractSearchId(url) {
  const trimmed = url.replace(/\/live\/?$/, "");
  return trimmed.split("/").pop() || url;
}
// Flatten one trade-API result element into { id, w, h, stashX, stashY, account }.
// Missing fields default to a 1x1 item at stash (0,0) with an empty account name.
function parseTradeItem(r) {
  const item = r.item ?? {};
  const stash = r.listing?.stash ?? {};
  return {
    id: r.id || "",
    w: item.w ?? 1,
    h: item.h ?? 1,
    stashX: stash.x ?? 0,
    stashY: stash.y ?? 0,
    account: r.listing?.account?.name || "",
  };
}
// Resolve true when the locator becomes visible within timeoutMs, false on
// timeout or any other waitFor rejection — never throws.
async function waitForVisible(locator, timeoutMs) {
  return locator
    .waitFor({ state: "visible", timeout: timeoutMs })
    .then(() => true)
    .catch(() => false);
}
// Give a confirmation dialog a moment to render, then click its accept button
// if one shows up. Silent when there is no dialog or the click fails.
async function handleConfirmDialog(page) {
  await new Promise((resolve) => setTimeout(resolve, 500));
  try {
    const btn = page.locator(Selectors.ConfirmYesButton).first();
    const visible = await waitForVisible(btn, 2000);
    if (visible) {
      await btn.click();
      log("Confirmed dialog");
    }
  } catch {
    /* No dialog */
  }
}
// Wire up a page WebSocket: only live-search trade sockets are watched. Each frame's
// "new" array of item ids is forwarded to the host as a "newListings" event, unless
// the search has been paused. Socket closes are reported as "wsClose" events.
function handleWebSocket(ws, searchId) {
if (!ws.url().includes("/api/trade") || !ws.url().includes("/live/")) return;
log(`WebSocket connected for live search: ${searchId}`);
ws.on("framereceived", (frame) => {
if (pausedSearches.has(searchId)) return;
try {
// Frame may arrive as a raw string or as an object with a payload buffer.
const payload = typeof frame === "string" ? frame : frame.payload?.toString() ?? "";
const doc = JSON.parse(payload);
if (doc.new && Array.isArray(doc.new)) {
const ids = doc.new.filter((s) => s != null);
if (ids.length > 0) {
log(`New listings: ${searchId} (${ids.length} items)`);
sendEvent("newListings", { searchId, itemIds: ids });
}
}
} catch {
/* Non-JSON WebSocket frame */
}
});
ws.on("close", () => {
log(`WebSocket closed: ${searchId}`);
sendEvent("wsClose", { searchId });
});
}
// --- Command handlers ---
// "start": launch the persistent Chrome context with anti-automation flags and the
// stealth init script, then optionally open a dashboard tab (reusing the initial
// blank tab when the persistent context created one).
async function cmdStart(reqId, params) {
const { browserUserDataDir, headless, dashboardUrl } = params;
log(`Starting browser, userDataDir=${browserUserDataDir}, headless=${headless}`);
context = await chromium.launchPersistentContext(browserUserDataDir, {
channel: "chrome",
headless: !!headless,
viewport: null,
args: [
"--disable-blink-features=AutomationControlled",
"--disable-features=AutomationControlled",
"--no-first-run",
"--no-default-browser-check",
"--disable-infobars",
],
ignoreDefaultArgs: ["--enable-automation"],
});
await context.addInitScript(STEALTH_SCRIPT);
if (dashboardUrl) {
const pages = context.pages();
if (pages.length > 0) {
await pages[0].goto(dashboardUrl);
} else {
const p = await context.newPage();
await p.goto(dashboardUrl);
}
log(`Dashboard opened: ${dashboardUrl}`);
}
log("Browser launched (stealth active)");
sendResponse(reqId);
}
// "addSearch": open a tab on the trade search URL, register the WebSocket handler,
// and try to activate the site's live search. A duplicate searchId is a no-op.
async function cmdAddSearch(reqId, params) {
if (!context) throw new Error("Browser not started");
const { url } = params;
const searchId = extractSearchId(url);
if (searchPages.has(searchId)) {
log(`Search already open: ${searchId}`);
sendResponse(reqId, { searchId });
return;
}
log(`Adding trade search: ${url} (${searchId})`);
const page = await context.newPage();
searchPages.set(searchId, page);
await page.goto(url, { waitUntil: "networkidle" });
await new Promise((r) => setTimeout(r, 2000)); // PageLoad delay
page.on("websocket", (ws) => handleWebSocket(ws, searchId));
try {
const liveBtn = page.locator(Selectors.LiveSearchButton).first();
await liveBtn.click({ timeout: 5000 });
log(`Live search activated: ${searchId}`);
} catch {
// Best effort — the search may already be live or the selector may not match.
log(`Could not click Activate Live Search: ${searchId}`);
}
sendResponse(reqId, { searchId });
}
// "pauseSearch": mark the search paused (so any late WebSocket frames are dropped)
// and close its tab if one is open.
async function cmdPauseSearch(reqId, params) {
  const { searchId } = params;
  pausedSearches.add(searchId);
  const tab = searchPages.get(searchId);
  if (tab !== undefined) {
    searchPages.delete(searchId);
    await tab.close();
  }
  log(`Search paused: ${searchId}`);
  sendResponse(reqId);
}
// "clickTravel": click "Travel to Hideout" on the page identified by pageId
// (a searchId or scrapId) — scoped to a specific listing row when itemId is given,
// otherwise the first matching button — then dismiss any confirmation dialog.
// Responds { clicked: true/false }; never throws to the caller.
async function cmdClickTravel(reqId, params) {
const { pageId, itemId } = params;
// pageId is a searchId or scrapId
let page = searchPages.get(pageId) || scrapPages.get(pageId)?.page;
if (!page) {
sendResponse(reqId, { clicked: false });
return;
}
try {
if (itemId) {
const row = page.locator(Selectors.ListingById(itemId));
if (await waitForVisible(row, 5000)) {
const travelBtn = row.locator(Selectors.TravelToHideoutButton).first();
if (await waitForVisible(travelBtn, 3000)) {
await travelBtn.click();
log(`Clicked Travel to Hideout for item ${itemId}`);
await handleConfirmDialog(page);
sendResponse(reqId, { clicked: true });
return;
}
}
}
// Fall back to the first travel button anywhere on the page.
const btn = page.locator(Selectors.TravelToHideoutButton).first();
await btn.click({ timeout: 5000 });
log("Clicked Travel to Hideout");
await handleConfirmDialog(page);
sendResponse(reqId, { clicked: true });
} catch (ex) {
log(`Failed to click Travel to Hideout: ${ex.message}`);
sendResponse(reqId, { clicked: false });
}
}
// "openScrapPage": open a dedicated tab for the bulk-buy ("scrap") flow and capture
// items from /api/trade2/fetch/ responses observed during the initial page load.
// Responds with the generated scrapId and the captured items.
async function cmdOpenScrapPage(reqId, params) {
  if (!context) throw new Error("Browser not started");
  const { url } = params;
  const scrapId = `scrap-${++scrapIdCounter}`;
  const page = await context.newPage();
  const items = [];
  const onResponse = async (response) => {
    if (!response.url().includes("/api/trade2/fetch/")) return;
    try {
      const body = await response.text();
      const doc = JSON.parse(body);
      if (doc.result && Array.isArray(doc.result)) {
        for (const r of doc.result) {
          items.push(parseTradeItem(r));
        }
      }
    } catch {
      /* Non-JSON trade response */
    }
  };
  page.on("response", onResponse);
  await page.goto(url, { waitUntil: "networkidle" });
  await new Promise((r) => setTimeout(r, 2000)); // PageLoad delay
  // Detach the collector once the initial load is captured. cmdReloadScrapPage
  // attaches its own listener; leaving this one attached would keep appending to
  // this (now stale) array on every later fetch — a slow leak with no readers.
  page.removeListener("response", onResponse);
  scrapPages.set(scrapId, { page, items: [...items] });
  log(`Scrap page opened: ${url} (${items.length} items) → ${scrapId}`);
  sendResponse(reqId, { scrapId, items });
}
async function cmdReloadScrapPage(reqId, params) {
  // Re-navigates an existing scrap page and rebuilds its cached item list
  // from the trade fetch responses seen during the reload.
  const { scrapId } = params;
  const entry = scrapPages.get(scrapId);
  if (!entry) throw new Error(`Unknown scrapId: ${scrapId}`);
  const { page } = entry;
  const collected = [];
  const captureItems = async (response) => {
    if (!response.url().includes("/api/trade2/fetch/")) return;
    try {
      const doc = JSON.parse(await response.text());
      if (doc.result && Array.isArray(doc.result)) {
        for (const r of doc.result) collected.push(parseTradeItem(r));
      }
    } catch {
      /* Non-JSON */
    }
  };
  page.on("response", captureItems);
  await page.reload({ waitUntil: "networkidle" });
  await new Promise((r) => setTimeout(r, 2000)); // PageLoad delay
  page.removeListener("response", captureItems);
  entry.items = [...collected];
  log(`Scrap page reloaded: ${scrapId} (${collected.length} items)`);
  sendResponse(reqId, { items: collected });
}
async function cmdCloseScrapPage(reqId, params) {
  // Closes and forgets a scrap page. An unknown or already-closed page is
  // not an error — the close is best-effort.
  const { scrapId } = params;
  const entry = scrapPages.get(scrapId);
  if (entry) {
    await entry.page.close().catch(() => {});
    scrapPages.delete(scrapId);
  }
  log(`Scrap page closed: ${scrapId}`);
  sendResponse(reqId);
}
async function cmdStop(reqId) {
  // Gracefully tears down every open page and the browser context, acks the
  // command so the caller's pending request completes, then exits.
  log("Stopping daemon...");
  // Iterate values() — the original destructured [id, page] and never used id.
  for (const page of searchPages.values()) {
    try {
      await page.close();
    } catch {
      /* ignore */
    }
  }
  searchPages.clear();
  for (const entry of scrapPages.values()) {
    try {
      await entry.page.close();
    } catch {
      /* ignore */
    }
  }
  scrapPages.clear();
  if (context) {
    try {
      await context.close();
    } catch {
      /* ignore */
    }
    context = null;
  }
  sendResponse(reqId);
  log("Daemon stopped");
  process.exit(0);
}
// --- Command dispatch ---
// Maps the `cmd` field of an incoming request line to its handler. Every
// handler receives (reqId, params) and is responsible for replying itself
// via sendResponse/sendError.
const handlers = {
  start: cmdStart,
  addSearch: cmdAddSearch,
  pauseSearch: cmdPauseSearch,
  clickTravel: cmdClickTravel,
  openScrapPage: cmdOpenScrapPage,
  reloadScrapPage: cmdReloadScrapPage,
  closeScrapPage: cmdCloseScrapPage,
  stop: cmdStop,
};
async function handleCommand(line) {
  // Parse one JSON request line and dispatch it to the matching handler.
  // Malformed JSON and unknown commands are reported, never thrown.
  let request;
  try {
    request = JSON.parse(line);
  } catch {
    log(`Invalid JSON: ${line}`);
    return;
  }
  const { reqId, cmd, ...params } = request;
  const handlerFn = handlers[cmd];
  if (!handlerFn) {
    sendError(reqId, `Unknown command: ${cmd}`);
    return;
  }
  try {
    await handlerFn(reqId, params);
  } catch (ex) {
    log(`Command ${cmd} failed: ${ex.message}`);
    sendError(reqId, ex.message);
  }
}
// --- Main ---
// Commands arrive one JSON object per line on stdin; a closed stdin means
// the parent process is gone, so we shut down.
const rl = createInterface({ input: process.stdin });
rl.on("line", (line) => {
  handleCommand(line.trim());
});
rl.on("close", () => {
  log("stdin closed, shutting down");
  process.exit(0);
});
// Signal ready
sendJson({ type: "ready" });
log("Daemon ready, waiting for commands...");

59
tools/trade-daemon/package-lock.json generated Normal file
View file

@ -0,0 +1,59 @@
{
"name": "trade-daemon",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "trade-daemon",
"version": "1.0.0",
"dependencies": {
"playwright": "^1.49.0"
}
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright": {
"version": "1.58.2",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.58.2.tgz",
"integrity": "sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==",
"license": "Apache-2.0",
"dependencies": {
"playwright-core": "1.58.2"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/playwright-core": {
"version": "1.58.2",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.58.2.tgz",
"integrity": "sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==",
"license": "Apache-2.0",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=18"
}
}
}
}

View file

@ -0,0 +1,9 @@
{
"name": "trade-daemon",
"version": "1.0.0",
"type": "module",
"private": true,
"dependencies": {
"playwright": "^1.49.0"
}
}