Implemented Ban & Block Diff to identify responsible peers for SHA-1 failure

Fixed a critical (but rare) issue with "broken" peers sending invalid data, which could cause SHA-1 validation to fail indefinitely (re-requesting and failing the same piece)
SuRGeoNix committed Nov 15, 2020
1 parent d1d3542 commit 1d9de6e
Showing 6 changed files with 169 additions and 54 deletions.
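
For context on the approach described in the commit message: when a completed piece fails SHA-1 validation twice, the two failed copies can be compared block by block, and the peers that supplied differing blocks get banned. A minimal standalone sketch of that idea (hypothetical helper and names, not the code committed below):

using System;
using System.Collections.Generic;

static class BlockDiffSketch
{
    // Given two copies of the same piece that both failed SHA-1 and, for each copy,
    // a map of block index -> host that supplied that block, return the hosts whose
    // blocks differ between the two attempts.
    public static List<string> SuspectPeers(byte[] firstFail, byte[] secondFail,
                                            Dictionary<int, string> ownersFirst,
                                            Dictionary<int, string> ownersSecond,
                                            int blockSize)
    {
        var suspects = new List<string>();
        int blocks = firstFail.Length / blockSize;

        for (int i = 0; i < blocks; i++)
        {
            var a = new ReadOnlySpan<byte>(firstFail,  i * blockSize, blockSize);
            var b = new ReadOnlySpan<byte>(secondFail, i * blockSize, blockSize);

            if (!a.SequenceEqual(b))
            {
                // At least one of the two senders of this block delivered bad data; banning
                // both breaks the re-request loop at the cost of possibly losing a good peer.
                if (!suspects.Contains(ownersFirst[i]))  suspects.Add(ownersFirst[i]);
                if (!suspects.Contains(ownersSecond[i])) suspects.Add(ownersSecond[i]);
            }
        }
        return suspects;
    }
}

The committed implementation (SHA1FailedPiece.FindDiffs in BitSwarm.cs below) additionally falls back to banning the most frequent sender when the two failed copies are identical.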
@@ -5,9 +5,9 @@
<TargetFramework>netcoreapp3.1</TargetFramework>
<RootNamespace>BitSwarmConsole</RootNamespace>
<AssemblyName>bswarm</AssemblyName>
<AssemblyVersion>2.2.5.0</AssemblyVersion>
<FileVersion>2.2.5.0</FileVersion>
<Version>2.2.5</Version>
<AssemblyVersion>2.2.6.0</AssemblyVersion>
<FileVersion>2.2.6.0</FileVersion>
<Version>2.2.6</Version>
</PropertyGroup>

<ItemGroup>
4 changes: 2 additions & 2 deletions BitSwarm (WinForms Demo)/Properties/AssemblyInfo.cs
@@ -32,5 +32,5 @@
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.2.5.0")]
[assembly: AssemblyFileVersion("2.2.5.0")]
[assembly: AssemblyVersion("2.2.6.0")]
[assembly: AssemblyFileVersion("2.2.6.0")]
181 changes: 153 additions & 28 deletions BitSwarm/BitSwarm.cs
@@ -373,10 +373,6 @@ public StatsUpdatedArgs(StatsStructure stats)

// Generators (Hash / Random)
public static SHA1 sha1 = new SHA1Managed();

/* Trying to identify a strange bug or Peers that send invalid data on purpose
public static SHA1 sha1_2 = new SHA1Managed();
public SHA1 sha1_3 = new SHA1Managed();*/
private static Random rnd = new Random();
internal byte[] peerID;

@@ -646,7 +642,7 @@ internal void FillPeers(Dictionary<string, int> newPeers, PeersStorage storage)
lock (peersForDispatch)
foreach (KeyValuePair<string, int> peerKV in newPeers)
{
if (!peersStored.ContainsKey(peerKV.Key))
if (!peersStored.ContainsKey(peerKV.Key) && !peersBanned.Contains(peerKV.Key))
{
peersStored.TryAdd(peerKV.Key, peerKV.Value);
Peer peer = new Peer(peerKV.Key, peerKV.Value);
@@ -685,7 +681,7 @@ internal void ReFillPeers()

foreach (var peerKV in peersStored)
{
if (!peersRunning.Contains(peerKV.Key))
if (!peersRunning.Contains(peerKV.Key) && !peersBanned.Contains(peerKV.Key))
{
Peer peer = new Peer(peerKV.Key, peerKV.Value);
peersForDispatch.Push(peer);
@@ -835,7 +831,7 @@ public string DumpStats()


stats += "\n";
stats += $" v2.2.4 " +
stats += $" v2.2.6 " +
$"{PadStats(String.Format("{0:n0}", Stats.DownRate/1024), 9)} KB/s | " +
$"{PadStats(String.Format("{0:n1}", ((Stats.DownRate * 8)/1000.0)/1000.0), 15)} Mbps | " +
$"Max: {String.Format("{0:n0}", Stats.MaxRate/1024)} KB/s, {String.Format("{0:n0}", ((Stats.MaxRate * 8)/1000.0)/1000.0)} Mbps";
@@ -1393,7 +1389,7 @@ internal void MetadataPieceRejected(int piece, string src)
Log($"[{src.PadRight(15, ' ')}] [RECV][M]\tPiece: {piece} Rejected");
}

// PieceBlock [Torrent Receive]
// PieceBlock [Torrent Receive]
internal void PieceReceived(byte[] data, int piece, int offset, Peer peer)
{
if (!isRunning) return;
@@ -1409,7 +1405,7 @@ internal void PieceReceived(byte[] data, int piece, int offset, Peer peer)
containsKey = torrent.data.pieceProgress.ContainsKey(piece);
pieceProgress = torrent.data.progress.GetBit(piece);
blockProgress = containsKey ? torrent.data.pieceProgress[piece].progress.GetBit(block) : false;

// Piece Done | Block Done
if ( (!containsKey && pieceProgress ) // Piece Done
|| ( containsKey && blockProgress ) ) // Block Done
@@ -1432,6 +1428,9 @@ internal void PieceReceived(byte[] data, int piece, int offset, Peer peer)
if (Options.Verbosity > 1) Log($"[{peer.host.PadRight(15, ' ')}] [RECV][P]\tPiece: {piece} Block: {block} Offset: {offset} Size: {data.Length} Requests: {peer.PiecesRequested}");
Stats.BytesDownloaded += data.Length;

// Keep track of received data in case of SHA1 failure to ban the responsible peer
recvBlocksTracking[$"{piece}|{block}"] = peer.host;

// Parse Block Data to Piece Data
Buffer.BlockCopy(data, 0, torrent.data.pieceProgress[piece].data, offset, data.Length);

@@ -1449,31 +1448,48 @@ internal void PieceReceived(byte[] data, int piece, int offset, Peer peer)
pieceHash = sha1.ComputeHash(torrent.data.pieceProgress[piece].data);
if (!Utils.ArrayComp(torrent.file.pieces[piece], pieceHash))
{
/*
// Trying to identify a strange bug or Peers that send invalid data on purpose
bool moreAttempts = false;
pieceHash = sha1.ComputeHash(torrent.data.pieceProgress[piece].data);
if (Utils.ArrayComp(torrent.file.pieces[piece], pieceHash)) { Log("Worked with 1"); moreAttempts = true;}
// Second failure in a row | Ban peers whose blocks differ (may also ban good peers; if no blocks differ, the same peer likely resent the same invalid data, so we ban the most frequent sender)
if (sha1FailedPieces.ContainsKey(piece))
{
if (Options.Verbosity > 0) Log($"[{peer.host.PadRight(15, ' ')}] [RECV][P]\tPiece: {piece} Block: {block} Offset: {offset} Size: {data.Length} Size2: {torrent.data.pieceProgress[piece].data.Length} SHA-1 failed | Doing Diff (both)");
SHA1FailedPiece sfp = new SHA1FailedPiece(piece, torrent.data.pieceProgress[piece].data, recvBlocksTracking, torrent.data.blocks);
List<string> responsiblePeers = SHA1FailedPiece.FindDiffs(sha1FailedPieces[piece], sfp, true);

pieceHash = sha1_2.ComputeHash(torrent.data.pieceProgress[piece].data);
if (Utils.ArrayComp(torrent.file.pieces[piece], pieceHash)) { Log("Worked with 2"); moreAttempts = true;}
// We may also ban good peers here
foreach(string host in responsiblePeers)
BanPeer(host);
}

pieceHash = sha1_3.ComputeHash(torrent.data.pieceProgress[piece].data);
if (Utils.ArrayComp(torrent.file.pieces[piece], pieceHash)) { Log("Worked with 3"); moreAttempts = true;}
// First failure | Keep the piece data for later review
else
{
if (Options.Verbosity > 0) Log($"[{peer.host.PadRight(15, ' ')}] [RECV][P]\tPiece: {piece} Block: {block} Offset: {offset} Size: {data.Length} Size2: {torrent.data.pieceProgress[piece].data.Length} SHA-1 failed | Adding for review");
SHA1FailedPiece sfp = new SHA1FailedPiece(piece, torrent.data.pieceProgress[piece].data, recvBlocksTracking, torrent.data.blocks);
sha1FailedPieces.TryAdd(piece, sfp);
}

if (!moreAttempts)
{*/
Log($"[{peer.host.PadRight(15, ' ')}] [RECV][P]\tPiece: {piece} Block: {block} Offset: {offset} Size: {data.Length} Size2: {torrent.data.pieceProgress[piece].data.Length} SHA-1 validation failed");
Stats.BytesDropped += torrent.data.pieceProgress[piece].data.Length;
Stats.BytesDownloaded -= torrent.data.pieceProgress[piece].data.Length;
sha1Fails++;

Stats.BytesDropped += torrent.data.pieceProgress[piece].data.Length;
Stats.BytesDownloaded -= torrent.data.pieceProgress[piece].data.Length;
sha1Fails++;
UnSetRequestsBit(piece);
torrent.data.pieceProgress.Remove(piece);

UnSetRequestsBit(piece);
torrent.data.pieceProgress.Remove(piece);
return;
}

// SHA-1 now succeeds for a piece that previously failed (identify the peer responsible for the earlier failure)
else if (sha1FailedPieces.ContainsKey(piece))
{
if (Options.Verbosity > 0) Log($"[{peer.host.PadRight(15, ' ')}] [RECV][P]\tPiece: {piece} Block: {block} Offset: {offset} Size: {data.Length} Size2: {torrent.data.pieceProgress[piece].data.Length} SHA-1 Success | Doing Diff");
SHA1FailedPiece sfp = new SHA1FailedPiece(piece, torrent.data.pieceProgress[piece].data, recvBlocksTracking, torrent.data.blocks);
List<string> responsiblePeers = SHA1FailedPiece.FindDiffs(sha1FailedPieces[piece], sfp);

return;
//}
foreach(string host in responsiblePeers)
BanPeer(host);

// Clean-up
sha1FailedPieces.TryRemove(piece, out SHA1FailedPiece tmp01);
}

// Save Piece in PartFiles [Thread-safe?]
@@ -1507,6 +1523,10 @@ internal void PieceReceived(byte[] data, int piece, int offset, Peer peer)

}
}

// Clean-Up
for (int i=0; i<torrent.data.blocks; i++)
recvBlocksTracking.TryRemove($"{piece}|{i}", out string tmp01);
}
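
To summarize the branching above — a first failure stores a snapshot, a repeated failure diffs the two copies and bans, a later success diffs against the stored failure to pinpoint the culprit — a condensed sketch of the classification (hypothetical names, not the committed code):

using System.Collections.Generic;

enum HashOutcome { FirstFailure, RepeatedFailure, RecoveredSuccess, CleanSuccess }

static class FailureFlowSketch
{
    // failedSnapshots holds one stored copy per piece that has already failed SHA-1 once.
    public static HashOutcome Classify(bool sha1Ok, int piece, IDictionary<int, byte[]> failedSnapshots)
    {
        if (!sha1Ok)
            return failedSnapshots.ContainsKey(piece) ? HashOutcome.RepeatedFailure
                                                      : HashOutcome.FirstFailure;

        return failedSnapshots.ContainsKey(piece) ? HashOutcome.RecoveredSuccess
                                                  : HashOutcome.CleanSuccess;
    }
}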

// PieceBlock [Torrent Rejected | Timeout | Failed]
@@ -1618,6 +1638,102 @@ private void UnSetRequestsBit(int piece)
}
#endregion

#region Ban | SHA-1 Fails
private void BanPeer(string host)
{
Log($"[BAN] {host}");

peersBanned.Add(host);

lock (peersForDispatch)
foreach (var thread in bstp.Threads)
{
if (thread != null && thread.peer != null && thread.peer.host == host)
{
Log($"[BAN] {host} Found in BSTP");
thread.peer.Disconnect(); thread.peer = null;
peersStored.TryRemove(host, out int tmp01);
}
}
}
class SHA1FailedPiece
{
public int piece;
public byte[] data;
public ConcurrentDictionary<string, string> recvBlocksTracking = new ConcurrentDictionary<string, string>();

public SHA1FailedPiece(int piece, byte[] data, ConcurrentDictionary<string, string> tracking, int numberOfBlocks)
{
this.piece = piece;
this.data = data;

// Copy From Recv Tracking only Piece Specific
for (int i=0; i<numberOfBlocks; i++)
{
if (!tracking.ContainsKey($"{piece}|{i}"))
continue;
else
recvBlocksTracking[$"{piece}|{i}"] = tracking[$"{piece}|{i}"];
}

}

public static List<string> FindDiffs(SHA1FailedPiece sfp1, SHA1FailedPiece sfp2, bool bothSide = true)
{
// Will not check the LastPiece / LastBlock
if (sfp1 == null || sfp2 == null || sfp1.piece != sfp2.piece || sfp1.data.Length != sfp2.data.Length || sfp1.data.Length % Peer.MAX_DATA_SIZE != 0)
return null;

List<string> responsiblePeers = new List<string>();

Dictionary<string, int> famousCounter = new Dictionary<string, int>();

// Build the list of responsible peers (peers that sent differing blocks)
for (int i=0; i<sfp1.data.Length / Peer.MAX_DATA_SIZE; i++)
{
if (!famousCounter.ContainsKey(sfp1.recvBlocksTracking[$"{sfp1.piece}|{i}"])) famousCounter.Add(sfp1.recvBlocksTracking[$"{sfp1.piece}|{i}"], 0);
if (!famousCounter.ContainsKey(sfp2.recvBlocksTracking[$"{sfp2.piece}|{i}"])) famousCounter.Add(sfp2.recvBlocksTracking[$"{sfp2.piece}|{i}"], 0);

famousCounter[sfp1.recvBlocksTracking[$"{sfp1.piece}|{i}"]]++;
famousCounter[sfp2.recvBlocksTracking[$"{sfp2.piece}|{i}"]]++;

byte[] block1 = Utils.ArraySub(ref sfp1.data, i * Peer.MAX_DATA_SIZE, Peer.MAX_DATA_SIZE);
byte[] block2 = Utils.ArraySub(ref sfp2.data, i * Peer.MAX_DATA_SIZE, Peer.MAX_DATA_SIZE);

if (!Utils.ArrayComp(block1, block2))
{
if (!responsiblePeers.Contains(sfp1.recvBlocksTracking[$"{sfp1.piece}|{i}"]))
responsiblePeers.Add(sfp1.recvBlocksTracking[$"{sfp1.piece}|{i}"]);

if (bothSide && !responsiblePeers.Contains(sfp2.recvBlocksTracking[$"{sfp2.piece}|{i}"]))
responsiblePeers.Add(sfp2.recvBlocksTracking[$"{sfp2.piece}|{i}"]);
}
}

// Identical invalid blocks probably came from the same peer (let's gamble on the probabilities)
if (responsiblePeers.Count == 0)
{
string mostFamous = "";
int curMin = 0;
foreach (var famous in famousCounter)
{
if (famous.Value > curMin)
{
curMin = famous.Value;
mostFamous = famous.Key;
}
}
if (mostFamous != "") responsiblePeers.Add(mostFamous);
}

return responsiblePeers;
}
}
HashSet<string> peersBanned = new HashSet<string>();
ConcurrentDictionary<int, SHA1FailedPiece> sha1FailedPieces = new ConcurrentDictionary<int, SHA1FailedPiece>();
ConcurrentDictionary<string, string> recvBlocksTracking = new ConcurrentDictionary<string, string>();
#endregion

#region Misc
private void CreatePieceProgress(int piece)
{
@@ -1638,6 +1754,15 @@ private int GetBlockSize(int piece, int block)
return torrent.data.blockSize;
}
internal bool NoPiecesPeer(Peer peer) { return torrent.metadata.isDone && !peer.stageYou.haveAll && (peer.stageYou.haveNone || peer.stageYou.bitfield == null || torrent.data.requests.GetFirst01(peer.stageYou.bitfield) == -1); }
public void CancelRequestedPieces()
{
lock (peersForDispatch)
foreach (var thread in bstp.Threads)
{
if (thread != null && thread.peer != null && thread.peer.status == Peer.Status.DOWNLOADING)
thread.peer.CancelPieces();
}
}
internal void Log(string msg) { if (Options.Verbosity > 0) log.Write($"[BitSwarm] {msg}"); }
#endregion
}
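
One detail worth noting from FindDiffs above: when the two failed copies are byte-for-byte identical there is nothing to diff, so the code gambles on probability and bans the peer that contributed the most blocks. A standalone sketch of that fallback (hypothetical helper, simplified to a single attempt's block map):

using System.Collections.Generic;
using System.Linq;

static class FallbackBanSketch
{
    // Returns the host that supplied the most blocks of the failed piece
    // (the "most famous" peer, in the wording of the diff above).
    public static string MostFrequentSender(IReadOnlyDictionary<int, string> blockOwners)
    {
        return blockOwners
            .GroupBy(kv => kv.Value)
            .OrderByDescending(g => g.Count())
            .Select(g => g.Key)
            .FirstOrDefault() ?? "";
    }
}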
26 changes: 5 additions & 21 deletions BitSwarm/BitSwarm.csproj
@@ -14,28 +14,12 @@
<RepositoryType>git</RepositoryType>
<PackageTags>bitswarm bittorrent torrent client streaming dht</PackageTags>
<Copyright>© SuRGeoNix 2020</Copyright>
<Version>2.2.5</Version>
<PackageReleaseNotes>First Alpha Testing Completed (started on v2.0)
<Version>2.2.6</Version>
<PackageReleaseNotes>Implemented Ban &amp; Block Diff to identify responsible peers for failure

Improved Performance, Throughput &amp; Stability

- New Threading Implementation (BSTP)
- New Timeout, Retries &amp; Rejects Handling
- New Functionality for Allow Fast, Boost &amp; Sleep Mode
- New Focus Areas for Streaming (Faster &amp; Stable)
- Reduced Dropped Bytes &amp; Peers Miscommunication
- Network Sockets Stability &amp; Throughput
- Peers Autonomous Life Cycle (Request/Receive/Timeout/Reject)
- Large Number of Issues Fixed (including Deadlocks, DHT, Trackers)

Final Adjustments on v2.2.5

1) DHT Threads &amp; Timing Adjustments
2) DHT Socket Stability (EndPoints)
3) Peers Communication (Less Drop Bytes &amp; Handle Multiple Rejects to avoid overhead)
4) Stats progress re-calculation on include/exclude files</PackageReleaseNotes>
<FileVersion>2.2.5.0</FileVersion>
<AssemblyVersion>2.2.5.0</AssemblyVersion>
Fixed a critical (but rare) issue with "broken" peers that were sending invalid data, which could cause SHA-1 to fail indefinitely (re-requesting and failing the same piece).</PackageReleaseNotes>
<FileVersion>2.2.6.0</FileVersion>
<AssemblyVersion>2.2.6.0</AssemblyVersion>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
3 changes: 3 additions & 0 deletions BitSwarm/Peer.cs
@@ -886,7 +886,10 @@ public void CancelPieces()
sendBuff = new byte[0];

foreach (Tuple<int, int, int> piece in lastPieces)
{
if (Beggar.Options.Verbosity > 0) Log(4, $"[REQC] [P]\tPiece: {piece.Item1} Block: {piece.Item2 / Beggar.torrent.data.blockSize} Offset: {piece.Item2} Size: {piece.Item3} Requests: {piecesRequested}");
sendBuff = Utils.ArrayMerge(sendBuff, PrepareMessage(Messages.CANCEL, false, Utils.ArrayMerge(Utils.ToBigEndian((Int32)piece.Item1), Utils.ToBigEndian((Int32)piece.Item2), Utils.ToBigEndian((Int32)piece.Item3))));
}

tcpStream.Write(sendBuff, 0, sendBuff.Length);
}
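
The Peer.cs change above batches one CANCEL message per outstanding request into a single send buffer. For reference, a single BitTorrent cancel message (BEP 3) is a 4-byte big-endian length prefix of 13, the message id 8, and three big-endian Int32 fields (piece index, block offset, block length). A minimal standalone sketch (hypothetical helper, not the library's PrepareMessage):

using System;
using System.Buffers.Binary;

static class WireSketch
{
    // Builds one CANCEL message: <len=0013><id=8><index><begin><length>.
    public static byte[] Cancel(int pieceIndex, int blockOffset, int blockLength)
    {
        var msg = new byte[17];                                   // 4 (prefix) + 1 (id) + 12 (payload)
        BinaryPrimitives.WriteInt32BigEndian(msg.AsSpan(0, 4), 13);
        msg[4] = 8;                                               // message id: cancel
        BinaryPrimitives.WriteInt32BigEndian(msg.AsSpan(5, 4), pieceIndex);
        BinaryPrimitives.WriteInt32BigEndian(msg.AsSpan(9, 4), blockOffset);
        BinaryPrimitives.WriteInt32BigEndian(msg.AsSpan(13, 4), blockLength);
        return msg;
    }
}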
3 changes: 3 additions & 0 deletions BitSwarm/Torrent.cs
@@ -165,6 +165,9 @@ public void FillFromMetadata()

BDictionary bInfo = (BDictionary) bParser.Parse(metadata.file.FileName);
FillFromInfo(bInfo);

if (file.infoHash != Utils.ArrayToStringHex(sha1.ComputeHash(bInfo.EncodeAsBytes())))
Console.WriteLine("CRITICAL!!!! Metadata SHA1 validation failed");
}
public void FillFromInfo(BDictionary bInfo)
{
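
The Torrent.cs check above guards metadata fetched from peers: the SHA-1 of the re-encoded info dictionary must match the torrent's info-hash, otherwise the metadata is corrupt. A minimal standalone sketch of the same comparison (hypothetical helper, assuming the bencoded info bytes and the expected 20-byte hash are already at hand):

using System;
using System.Security.Cryptography;

static class MetadataCheckSketch
{
    // Returns true when the bencoded info dictionary hashes to the expected info-hash.
    public static bool Matches(byte[] bencodedInfo, byte[] expectedInfoHash)
    {
        using var sha1 = SHA1.Create();
        byte[] actual = sha1.ComputeHash(bencodedInfo);
        return actual.AsSpan().SequenceEqual(expectedInfoHash);
    }
}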
