revert: peer batching

This commit is contained in:
HDVinnie
2022-11-14 20:10:26 -05:00
parent 2fca81af28
commit ace8dcb5b1
9 changed files with 4 additions and 117 deletions

View File

@@ -45,7 +45,7 @@ class AutoFlushPeers extends Command
public function handle(): void
{
$carbon = new Carbon();
$peers = Peer::select(['torrent_id', 'user_id', 'peer_id', 'updated_at'])->where('updated_at', '<', $carbon->copy()->subHours(2)->toDateTimeString())->get();
$peers = Peer::select(['id', 'torrent_id', 'user_id', 'updated_at'])->where('updated_at', '<', $carbon->copy()->subHours(2)->toDateTimeString())->get();
foreach ($peers as $peer) {
$history = History::where('torrent_id', '=', $peer->torrent_id)->where('user_id', '=', $peer->user_id)->first();

View File

@@ -1,83 +0,0 @@
<?php
/**
 * NOTICE OF LICENSE.
 *
 * UNIT3D Community Edition is open-sourced software licensed under the GNU Affero General Public License v3.0
 * The details is bundled with this project in the file LICENSE.txt.
 *
 * @project    UNIT3D Community Edition
 *
 * @author     Roardom <roardom@protonmail.com>
 * @license    https://www.gnu.org/licenses/agpl-3.0.en.html/ GNU Affero General Public License v3.0
 */

namespace App\Console\Commands;

use App\Models\Peer;
use Illuminate\Console\Command;
use Illuminate\Support\Facades\Redis;

/**
 * Drains the Redis peer batch list and bulk-upserts the rows into the
 * `peers` table, chunked so a single query never exceeds MySQL's
 * prepared-statement placeholder limit.
 *
 * NOTE(review): the original @see referenced AutoFlushPeersTest on a class
 * named AutoInsertPeers — looks like a copy/paste slip from the sibling
 * command; confirm the actual test class name.
 *
 * @see \Tests\Unit\Console\Commands\AutoInsertPeersTest
 */
class AutoInsertPeers extends Command
{
    /**
     * MySQL can bind at most 65 535 placeholders per prepared statement and
     * each upserted peer row consumes 13 fields, so cap the rows per query.
     * Declared as an int literal (= intdiv(65_000, 13)); the original
     * `65_000 / 13` happened to divide evenly but yielded a float constant,
     * which then leaked into the RPOP count argument.
     */
    public const PEERS_PER_CYCLE = 5_000;

    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'auto:insert_peers';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Inserts peers in batches';

    /**
     * Execute the console command.
     *
     * Pops up to PEERS_PER_CYCLE serialized peers per iteration from the
     * Redis list (filled by the announce pipeline) and upserts them keyed
     * on `peer_id`.
     *
     * @throws \Exception
     */
    public function handle(): void
    {
        $key = config('cache.prefix').':peers:batch';

        $peerCount = Redis::connection('cache')->command('LLEN', [$key]);
        $cycles = (int) ceil($peerCount / self::PEERS_PER_CYCLE);

        for ($i = 0; $i < $cycles; $i++) {
            $peers = Redis::connection('cache')->command('RPOP', [$key, self::PEERS_PER_CYCLE]);

            // Another consumer may drain the list between LLEN and RPOP, in
            // which case RPOP returns null — stop early instead of handing a
            // non-array to array_map() (fatal TypeError on PHP 8).
            if (! \is_array($peers) || $peers === []) {
                break;
            }

            // Payloads are serialized by this application before being
            // LPUSHed, so unserialize() here does not consume untrusted input.
            $peers = array_map('unserialize', $peers);

            Peer::upsert(
                $peers,
                ['peer_id'],
                [
                    'peer_id',
                    'md5_peer_id',
                    'info_hash',
                    'ip',
                    'port',
                    'agent',
                    'uploaded',
                    'downloaded',
                    'seeder',
                    'left',
                    'torrent_id',
                    'user_id',
                    'updated_at',
                ],
            );
        }

        $this->comment('Automated insert peers command complete');
    }
}

View File

@@ -47,7 +47,6 @@ class Kernel extends ConsoleKernel
$schedule->command('auto:stats_clients')->daily();
$schedule->command('auto:remove_torrent_buffs')->hourly();
$schedule->command('auto:torrent_balance')->hourly();
//$schedule->command('auto:insert_peers')->everyMinute();
//$schedule->command('auto:ban_disposable_users')->weekends();
//$schedule->command('backup:clean')->daily();
//$schedule->command('backup:run')->daily();

View File

@@ -41,7 +41,7 @@ class FlushController extends Controller
public function peers(): \Illuminate\Http\RedirectResponse
{
$carbon = new Carbon();
$peers = Peer::select(['torrent_id', 'user_id', 'peer_id', 'updated_at'])->where('updated_at', '<', $carbon->copy()->subHours(2)->toDateTimeString())->get();
$peers = Peer::select(['id', 'torrent_id', 'user_id', 'updated_at'])->where('updated_at', '<', $carbon->copy()->subHours(2)->toDateTimeString())->get();
foreach ($peers as $peer) {
$history = History::where('torrent_id', '=', $peer->torrent_id)->where('user_id', '=', $peer->user_id)->first();

View File

@@ -1442,7 +1442,7 @@ class UserController extends Controller
$carbon = new Carbon();
// Get Peer List from User
$peers = Peer::select(['torrent_id', 'user_id', 'peer_id', 'updated_at'])
$peers = Peer::select(['id', 'torrent_id', 'user_id', 'updated_at'])
->where('user_id', '=', $user->id)
->where('updated_at', '<', $carbon->copy()->subMinutes(70)->toDateTimeString())
->get();

View File

@@ -79,6 +79,7 @@ class UserActive extends Component
return Peer::query()
->join('torrents', 'peers.torrent_id', '=', 'torrents.id')
->select(
'peers.id',
'peers.ip',
'peers.port',
'peers.agent',

View File

@@ -24,7 +24,6 @@ use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\Middleware\WithoutOverlapping;
use Illuminate\Queue\SerializesModels;
use Illuminate\Support\Facades\Redis;
class ProcessAnnounce implements ShouldQueue
{
@@ -151,7 +150,6 @@ class ProcessAnnounce implements ShouldQueue
$peer->updateConnectableStateIfNeeded();
$peer->updated_at = \now();
$peer->save();
//Redis::connection('cache')->command('LPUSH', [config('cache.prefix').':peers:batch', serialize($peer)]);
$history->user_id = $this->user->id;
$history->torrent_id = $this->torrent->id;
@@ -185,7 +183,6 @@ class ProcessAnnounce implements ShouldQueue
$peer->updateConnectableStateIfNeeded();
$peer->updated_at = \now();
$peer->save();
//Redis::connection('cache')->command('LPUSH', [config('cache.prefix').':peers:batch', serialize($peer)]);
$history->user_id = $this->user->id;
$history->torrent_id = $this->torrent->id;
@@ -280,7 +277,6 @@ class ProcessAnnounce implements ShouldQueue
$peer->updateConnectableStateIfNeeded();
$peer->updated_at = \now();
$peer->save();
//Redis::connection('cache')->command('LPUSH', [config('cache.prefix').':peers:batch', serialize($peer)]);
$history->user_id = $this->user->id;
$history->torrent_id = $this->torrent->id;

View File

@@ -21,12 +21,6 @@ class Peer extends Model
{
use HasFactory;
protected $primaryKey = 'peer_id';
public $incrementing = false;
protected $keyType = 'string';
/**
* Belongs To A User.
*/

View File

@@ -1,20 +0,0 @@
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;

// Replaces the surrogate auto-increment `id` key on `peers` with a composite
// primary key, part of the peer-batching schema this commit reverts.
return new class () extends Migration {
    /**
     * Run the migrations.
     *
     * Drops the auto-increment `id` column from `peers` and installs a
     * composite primary key over (torrent_id, user_id, peer_id).
     *
     * NOTE(review): no down() is defined, so this migration cannot be rolled
     * back automatically — restoring `id` would require its original column
     * definition, which is not visible here. Confirm irreversibility is
     * intentional.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('peers', function (Blueprint $table) {
            $table->dropColumn('id');
            $table->primary(['torrent_id', 'user_id', 'peer_id']);
        });
    }
};