forked from schmittlauch/Hash2Pub

Compare commits: 3 commits, d049b65f1e ... b8eeb852da

Author | SHA1 | Date
---|---|---
Trolli Schmittlauch | b8eeb852da |
Trolli Schmittlauch | 7fd369c8c5 |
Hécate | 41e999ed99 |
.hlint.yaml (new file, 8 lines added)

@ -0,0 +1,8 @@
+- group: {name: generalise, enabled: true}
+- warn: { name: Use DerivingStrategies }
+- error: { lhs: return, rhs: pure }
+- ignore: {name: "Avoid lambda using `infix`"}
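These HLint settings match the mechanical rewrites applied throughout the rest of this comparison: the generalise group suggests fmap over map and <> over ++, return/pure is enforced as an error, Use DerivingStrategies stays a warning, and the lambda-via-infix hint is silenced. A minimal before/after sketch of what the enabled hints ask for (illustrative only, not part of the changeset):

    -- Hypothetical example, not taken from Hash2Pub itself.
    module HlintExample where

    -- Style that the new .hlint.yaml flags:
    before :: [Int] -> IO [Int]
    before xs = return (map (+ 1) xs ++ [0])

    -- Style the rest of this comparison converts to:
    after :: [Int] -> IO [Int]
    after xs = pure (fmap (+ 1) xs <> [0])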
@ -23,6 +23,7 @@ in
     haddock
     cabal-install
     hlint
+    stylish-haskell
     pkgs.python3Packages.asn1ate
   ];
 };
@ -1,9 +1,9 @@
-import Hash2Pub.FediChord
-import Data.Map.Internal.Debug (showTree)
-import qualified Data.Map.Strict as Map
+import Data.Map.Internal.Debug (showTree)
+import qualified Data.Map.Strict as Map
+import Hash2Pub.FediChord

 giebMalCache :: [Integer] -> Map.Map NodeID ()
-giebMalCache = Map.fromList . map (mkCacheEntry . fromInteger)
+giebMalCache = Map.fromList . fmap (mkCacheEntry . fromInteger)
     where
         mkCacheEntry nodeid = (nodeid, ())

@ -18,11 +18,11 @@ nidLookupGT m = flip Map.lookupGT m . fromInteger
 edgeCase1 :: IO ()
 edgeCase1 = do
     putStrLn "Let there be a Map with the keys [2^255+2^254+3, 2, 2^253], all keys are NodeIDs mod 2^256."
     print testOverlap
     putStrLn "\nWhile (NodeID 2^255+2^254+3) > (NodeID 2^254 + 14) …"
     print $ toNodeID (2^255+2^254+3) > toNodeID (2^254+14)
     putStrLn "… and 2^255+2^254+3 is an element of the map…"
-    print $ Map.member (fromInteger 2^255+2^254+3) testOverlap
+    print $ Map.member (fromInteger (2^255+2^254+3)) testOverlap
     putStrLn "… looking for an element larger than 2^254 + 14 doesn't yield any."
     print $ nidLookupGT testOverlap (2^254+14)
     putStrLn "\nThat's the tree of the map:"
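The only functional change in this file is the added parentheses around the map key: in Haskell, function application binds more tightly than any infix operator, so fromInteger 2^255+2^254+3 applies fromInteger to the literal 2 only, with the exponentiation and additions happening afterwards, while fromInteger (2^255+2^254+3) converts the whole key in one step, which matches the intent stated in the putStrLn text. A small standalone sketch of the two parses (illustrative, not part of the changeset):

    -- Both definitions compile; they differ only in what fromInteger is applied to.
    keyImplicit, keyExplicit :: Integer
    keyImplicit = fromInteger 2 ^ 255 + 2 ^ 254 + 3    -- how the old line was parsed
    keyExplicit = fromInteger (2 ^ 255 + 2 ^ 254 + 3)  -- the intent, now written out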
@ -2,25 +2,25 @@

 module Hash2Pub.ASN1Coding where

-import Data.ASN1.Encoding -- asn1-encoding package
-import Data.ASN1.BinaryEncoding
-import Data.ASN1.Error()
-import Data.ASN1.Types -- asn1-types package
-import Data.ASN1.Parse
-import Data.Maybe (fromMaybe, mapMaybe, isNothing)
-import Data.Time.Clock.POSIX()
-import qualified Data.ByteString as BS
-import qualified Data.Set as Set
-import qualified Data.Map.Strict as Map
-import Data.Bifunctor (first)
-import Control.Exception (displayException)
+import Control.Exception (displayException)
+import Data.ASN1.BinaryEncoding
+import Data.ASN1.Encoding
+import Data.ASN1.Error ()
+import Data.ASN1.Parse
+import Data.ASN1.Types
+import Data.Bifunctor (first)
+import qualified Data.ByteString as BS
+import qualified Data.Map.Strict as Map
+import Data.Maybe (fromMaybe, isNothing, mapMaybe)
+import qualified Data.Set as Set
+import Data.Time.Clock.POSIX ()
 import Safe

-import Hash2Pub.FediChord
-import Hash2Pub.Utils
-import Hash2Pub.DHTProtocol
+import Hash2Pub.DHTProtocol
+import Hash2Pub.FediChord
+import Hash2Pub.Utils

 import Debug.Trace

 -- TODO: make this splitting function more intelligent, currently it creates many parts that are smaller than they could be, see #18
 -- | Try splitting a payload into multiple parts to be able to reduce size of
@ -107,7 +107,7 @@ serialiseMessage maxBytesLength msg =
     payloadParts i = zip [1..] . splitPayload i <$> actionPayload
     actionPayload = payload msg
     encodedMsgs i = Map.map encodeMsg $ messageParts i
-    maxMsgLength = maximum . map BS.length . Map.elems
+    maxMsgLength = maximum . fmap BS.length . Map.elems

 -- | encode a 'FediChordMessage' to a bytestring without further modification
 encodeMsg :: FediChordMessage -> BS.ByteString

@ -124,39 +124,39 @@ deserialiseMessage msgBytes = first displayException (decodeASN1' DER msgBytes)
 -- indicated by the data constructor, as ASN.1
 encodePayload :: ActionPayload -> [ASN1]
 encodePayload LeaveResponsePayload = [Null]
 encodePayload payload'@LeaveRequestPayload{} =
     Start Sequence
     : Start Sequence
-    : map (IntVal . getNodeID) (leaveSuccessors payload')
-    ++ [End Sequence
+    : fmap (IntVal . getNodeID) (leaveSuccessors payload')
+    <> [End Sequence
     , Start Sequence]
-    ++ map (IntVal . getNodeID) (leavePredecessors payload')
-    ++ [End Sequence
+    <> fmap (IntVal . getNodeID) (leavePredecessors payload')
+    <> [End Sequence
     , End Sequence]
 -- currently StabiliseResponsePayload and LeaveRequestPayload are equal
 encodePayload payload'@StabiliseResponsePayload{} =
     Start Sequence
     : Start Sequence
-    : map (IntVal . getNodeID) (stabiliseSuccessors payload')
-    ++ [End Sequence
+    : fmap (IntVal . getNodeID) (stabiliseSuccessors payload')
+    <> [End Sequence
     , Start Sequence]
-    ++ map (IntVal . getNodeID) (stabilisePredecessors payload')
-    ++ [End Sequence
+    <> fmap (IntVal . getNodeID) (stabilisePredecessors payload')
+    <> [End Sequence
     , End Sequence]
 encodePayload payload'@StabiliseRequestPayload = [Null]
 encodePayload payload'@QueryIDResponsePayload{} =
     let
       resp = queryResult payload'
     in
     Start Sequence
     : encodeQueryResult resp
     : case resp of
-          FOUND ns -> encodeNodeState $ ns
+          FOUND ns -> encodeNodeState ns
          FORWARD entrySet ->
              Start Sequence
              : (concatMap encodeCacheEntry . Set.elems $ entrySet)
-              ++ [End Sequence]
-    ++ [End Sequence]
+              <> [End Sequence]
+    <> [End Sequence]
 encodePayload payload'@QueryIDRequestPayload{} = [
     Start Sequence
     , IntVal . getNodeID $ queryTargetID payload'
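The ++ to <> and map to fmap rewrites in these encoder functions follow the generalise hint group from the new .hlint.yaml. For lists the generalised functions coincide with the specialised ones, so the change here is purely stylistic. A quick standalone check of that equivalence (sketch only, not part of the changeset):

    -- On lists, fmap is map and (<>) is (++).
    generaliseIsSafe :: Bool
    generaliseIsSafe =
        fmap (* 2) [1, 2, 3 :: Int] == map (* 2) [1, 2, 3]
            && ("ab" <> "cd") == ("ab" ++ "cd")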
@ -167,21 +167,21 @@ encodePayload payload'@QueryIDRequestPayload{} = [
 encodePayload payload'@JoinResponsePayload{} =
     Start Sequence
     : Start Sequence
-    : map (IntVal . getNodeID) (joinSuccessors payload')
-    ++ [End Sequence
+    : fmap (IntVal . getNodeID) (joinSuccessors payload')
+    <> [End Sequence
     , Start Sequence]
-    ++ map (IntVal . getNodeID) (joinPredecessors payload')
-    ++ [End Sequence
+    <> fmap (IntVal . getNodeID) (joinPredecessors payload')
+    <> [End Sequence
     , Start Sequence]
-    ++ concatMap encodeCacheEntry (joinCache payload')
-    ++ [End Sequence
+    <> concatMap encodeCacheEntry (joinCache payload')
+    <> [End Sequence
     , End Sequence]
 encodePayload payload'@JoinRequestPayload{} = [Null]
 encodePayload PingRequestPayload{} = [Null]
 encodePayload payload'@PingResponsePayload{} =
     Start Sequence
     : concatMap encodeNodeState (pingNodeStates payload')
-    ++ [End Sequence]
+    <> [End Sequence]

 encodeNodeState :: NodeState -> [ASN1]
 encodeNodeState ns = [

@ -200,13 +200,13 @@ encodeCacheEntry (RemoteCacheEntry ns timestamp) =
     Start Sequence
     : encodeNodeState ns
     -- ToDo: possibly optimise this by using dlists
-    ++ [
+    <> [
       IntVal . fromIntegral . fromEnum $ timestamp
     , End Sequence]
 encodeCacheEntry _ = []

 encodeQueryResult :: QueryResponse -> ASN1
 encodeQueryResult FOUND{} = Enumerated 0
 encodeQueryResult FORWARD{} = Enumerated 1

 -- | Encode a 'FediChordMessage' as ASN.1.

@ -218,11 +218,11 @@ encodeMessage
     : (Enumerated . fromIntegral . fromEnum $ action)
     : IntVal requestID
     : encodeNodeState sender
-    ++ [
+    <> [
       IntVal parts
     , IntVal part ]
-    ++ maybe [] encodePayload requestPayload
-    ++ [End Sequence]
+    <> maybe [] encodePayload requestPayload
+    <> [End Sequence]
 encodeMessage
     (Response responseTo senderID parts part action responsePayload) = [
     Start Sequence

@ -231,8 +231,8 @@ encodeMessage
     , IntVal parts
     , IntVal part
     , Enumerated . fromIntegral . fromEnum $ action]
-    ++ maybe [] encodePayload responsePayload
-    ++ [End Sequence]
+    <> maybe [] encodePayload responsePayload
+    <> [End Sequence]

 -- ===== parser combinators =====

@ -240,21 +240,21 @@ parseMessage :: ParseASN1 FediChordMessage
 parseMessage = do
     begin <- getNext
     case begin of
-        Start Sequence -> return ()
-        x -> throwParseError $ "unexpected ASN.1 element " ++ show x
+        Start Sequence -> pure ()
+        x -> throwParseError $ "unexpected ASN.1 element " <> show x
     -- request and response messages are distiguishable by their structure,
     -- see ASN.1 schema
     firstElem <- getNext
     message <- case firstElem of
         Enumerated a -> parseRequest . toEnum . fromIntegral $ a
         IntVal i -> parseResponse i
-        other -> throwParseError $ "unexpected first ASN1 element: " ++ show other
+        other -> throwParseError $ "unexpected first ASN1 element: " <> show other
     -- consume sequence end
     end <- getNext
     case end of
-        End Sequence -> return ()
-        x -> throwParseError $ "unexpected ASN.1 element " ++ show x
-    return message
+        End Sequence -> pure ()
+        x -> throwParseError $ "unexpected ASN.1 element " <> show x
+    pure message

@ -265,14 +265,14 @@ parseRequest action = do
     parts <- parseInteger
     part <- parseInteger
     hasPayload <- hasNext
-    payload <- if not hasPayload then return Nothing else Just <$> case action of
+    payload <- if not hasPayload then pure Nothing else Just <$> case action of
         QueryID -> parseQueryIDRequest
         Join -> parseJoinRequest
         Leave -> parseLeaveRequest
         Stabilise -> parseStabiliseRequest
         Ping -> parsePingRequest

-    return $ Request requestID sender parts part action payload
+    pure $ Request requestID sender parts part action payload

 parseResponse :: Integer -> ParseASN1 FediChordMessage
 parseResponse responseTo = do

@ -281,49 +281,49 @@ parseResponse responseTo = do
     part <- parseInteger
     action <- parseEnum :: ParseASN1 Action
     hasPayload <- hasNext
-    payload <- if not hasPayload then return Nothing else Just <$> case action of
+    payload <- if not hasPayload then pure Nothing else Just <$> case action of
         QueryID -> parseQueryIDResponse
         Join -> parseJoinResponse
         Leave -> parseLeaveResponse
         Stabilise -> parseStabiliseResponse
         Ping -> parsePingResponse

-    return $ Response responseTo senderID parts part action payload
+    pure $ Response responseTo senderID parts part action payload

 parseInteger :: ParseASN1 Integer
 parseInteger = do
     i <- getNext
     case i of
-        IntVal parsed -> return parsed
-        x -> throwParseError $ "Expected IntVal but got " ++ show x
+        IntVal parsed -> pure parsed
+        x -> throwParseError $ "Expected IntVal but got " <> show x

 parseEnum :: Enum a => ParseASN1 a
 parseEnum = do
     e <- getNext
     case e of
-        Enumerated en -> return $ toEnum . fromIntegral $ en
-        x -> throwParseError $ "Expected Enumerated but got " ++ show x
+        Enumerated en -> pure $ toEnum . fromIntegral $ en
+        x -> throwParseError $ "Expected Enumerated but got " <> show x

 parseString :: ParseASN1 String
 parseString = do
     s <- getNext
     case s of
-        ASN1String toBeParsed -> maybe (throwParseError "string parsing failed") return $ asn1CharacterToString toBeParsed
-        x -> throwParseError $ "Expected a ASN1String but got " ++ show x
+        ASN1String toBeParsed -> maybe (throwParseError "string parsing failed") pure $ asn1CharacterToString toBeParsed
+        x -> throwParseError $ "Expected a ASN1String but got " <> show x

 parseOctets :: ParseASN1 BS.ByteString
 parseOctets = do
     os <- getNext
     case os of
-        OctetString bs -> return bs
-        x -> throwParseError $ "Expected an OctetString but got " ++ show x
+        OctetString bs -> pure bs
+        x -> throwParseError $ "Expected an OctetString but got " <> show x

 parseNull :: ParseASN1 ()
 parseNull = do
     n <- getNext
     case n of
-        Null -> return ()
-        x -> throwParseError $ "Expected Null but got " ++ show x
+        Null -> pure ()
+        x -> throwParseError $ "Expected Null but got " <> show x

 parseNodeState :: ParseASN1 NodeState
 parseNodeState = onNextContainer Sequence $ do

@ -333,7 +333,7 @@ parseNodeState = onNextContainer Sequence $ do
     dhtPort' <- fromInteger <$> parseInteger
     apPort' <- fromInteger <$> parseInteger
     vServer' <- parseInteger
-    return NodeState {
+    pure NodeState {
         nid = nid'
       , domain = domain'
       , dhtPort = dhtPort'

@ -348,7 +348,7 @@ parseCacheEntry :: ParseASN1 RemoteCacheEntry
 parseCacheEntry = onNextContainer Sequence $ do
     node <- parseNodeState
     timestamp <- toEnum . fromIntegral <$> parseInteger
-    return $ RemoteCacheEntry node timestamp
+    pure $ RemoteCacheEntry node timestamp

 parseNodeCache :: ParseASN1 [RemoteCacheEntry]
 parseNodeCache = onNextContainer Sequence $ getMany parseCacheEntry

@ -356,14 +356,14 @@ parseNodeCache = onNextContainer Sequence $ getMany parseCacheEntry
 parseJoinRequest :: ParseASN1 ActionPayload
 parseJoinRequest = do
     parseNull
-    return JoinRequestPayload
+    pure JoinRequestPayload

 parseJoinResponse :: ParseASN1 ActionPayload
 parseJoinResponse = onNextContainer Sequence $ do
-    succ' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
-    pred' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    succ' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    pred' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
     cache <- parseNodeCache
-    return $ JoinResponsePayload {
+    pure $ JoinResponsePayload {
         joinSuccessors = succ'
       , joinPredecessors = pred'
       , joinCache = cache

@ -373,7 +373,7 @@ parseQueryIDRequest :: ParseASN1 ActionPayload
 parseQueryIDRequest = onNextContainer Sequence $ do
     targetID <- fromInteger <$> parseInteger
     lBestNodes <- parseInteger
-    return $ QueryIDRequestPayload {
+    pure $ QueryIDRequestPayload {
         queryTargetID = targetID
       , queryLBestNodes = lBestNodes
     }

@ -385,29 +385,29 @@ parseQueryIDResponse = onNextContainer Sequence $ do
         0 -> FOUND <$> parseNodeState
         1 -> FORWARD . Set.fromList <$> parseNodeCache
         _ -> throwParseError "invalid QueryIDResponse type"
-    return $ QueryIDResponsePayload {
+    pure $ QueryIDResponsePayload {
         queryResult = result
     }

 parseStabiliseRequest :: ParseASN1 ActionPayload
 parseStabiliseRequest = do
     parseNull
-    return StabiliseRequestPayload
+    pure StabiliseRequestPayload

 parseStabiliseResponse :: ParseASN1 ActionPayload
 parseStabiliseResponse = onNextContainer Sequence $ do
-    succ' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
-    pred' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
-    return $ StabiliseResponsePayload {
+    succ' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    pred' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    pure $ StabiliseResponsePayload {
         stabiliseSuccessors = succ'
       , stabilisePredecessors = pred'
     }

 parseLeaveRequest :: ParseASN1 ActionPayload
 parseLeaveRequest = onNextContainer Sequence $ do
-    succ' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
-    pred' <- map fromInteger <$> onNextContainer Sequence (getMany parseInteger)
-    return $ LeaveRequestPayload {
+    succ' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    pred' <- fmap fromInteger <$> onNextContainer Sequence (getMany parseInteger)
+    pure $ LeaveRequestPayload {
         leaveSuccessors = succ'
       , leavePredecessors = pred'
     }

@ -415,16 +415,16 @@ parseLeaveRequest = onNextContainer Sequence $ do
 parseLeaveResponse :: ParseASN1 ActionPayload
 parseLeaveResponse = do
     parseNull
-    return LeaveResponsePayload
+    pure LeaveResponsePayload

 parsePingRequest :: ParseASN1 ActionPayload
 parsePingRequest = do
     parseNull
-    return PingRequestPayload
+    pure PingRequestPayload

 parsePingResponse :: ParseASN1 ActionPayload
 parsePingResponse = onNextContainer Sequence $ do
     handledNodes <- getMany parseNodeState
-    return $ PingResponsePayload {
+    pure $ PingResponsePayload {
         pingNodeStates = handledNodes
     }
@ -1,5 +1,3 @@
-{-# LANGUAGE OverloadedStrings #-}
-
 module Hash2Pub.DHTProtocol
     ( QueryResponse (..)
     , queryLocalCache

@ -17,34 +15,26 @@ module Hash2Pub.DHTProtocol
     )
     where

-import Data.Maybe (maybe, fromMaybe)
-import qualified Data.Set as Set
-import qualified Data.Map as Map
+import qualified Data.Map as Map
+import Data.Maybe (fromMaybe, maybe)
+import qualified Data.Set as Set
 import Data.Time.Clock.POSIX
-import Network.Socket hiding (send, sendTo, recv, recvFrom)
+import Network.Socket hiding (recv, recvFrom, send, sendTo)
 import Network.Socket.ByteString

-import Hash2Pub.FediChord
-    ( NodeID
-    , NodeState (..)
-    , getSuccessors
-    , putSuccessors
-    , getPredecessors
-    , putPredecessors
-    , cacheGetNodeStateUnvalidated
-    , NodeCache
-    , CacheEntry(..)
-    , cacheLookup
-    , cacheLookupSucc
-    , cacheLookupPred
-    , localCompare
-    )
+import Hash2Pub.FediChord (CacheEntry (..), NodeCache, NodeID,
+                           NodeState (..),
+                           cacheGetNodeStateUnvalidated,
+                           cacheLookup, cacheLookupPred,
+                           cacheLookupSucc, getPredecessors,
+                           getSuccessors, localCompare,
+                           putPredecessors, putSuccessors)

 import Debug.Trace (trace)

 -- === queries ===

-data QueryResponse = FORWARD (Set.Set RemoteCacheEntry) -- ^return closest nodes from local cache.
+data QueryResponse = FORWARD (Set.Set RemoteCacheEntry) -- ^ return closest nodes from local cache.
                      -- whole cache entry is returned for making
                      -- the entry time stamp available to the
                      -- protocol serialiser

@ -83,7 +73,7 @@ queryLocalCache ownState nCache lBestNodes targetID
 -- === protocol serialisation data types

 data Action =
     QueryID
     | Join
     | Leave
     | Stabilise

@ -92,32 +82,32 @@ data Action =

 data FediChordMessage =
     Request {
         requestID :: Integer
       , sender :: NodeState
       , parts :: Integer
       , part :: Integer
       -- ^ part starts at 0
       , action :: Action
       , payload :: Maybe ActionPayload
     }
     | Response {
         responseTo :: Integer
       , senderID :: NodeID
       , parts :: Integer
       , part :: Integer
       , action :: Action
       , payload :: Maybe ActionPayload
     } deriving (Show, Eq)

 data ActionPayload =
     QueryIDRequestPayload {
         queryTargetID :: NodeID
       , queryLBestNodes :: Integer
     }
     | JoinRequestPayload
     | LeaveRequestPayload {
         leaveSuccessors :: [NodeID]
       , leavePredecessors :: [NodeID]
     }
     | StabiliseRequestPayload
     | PingRequestPayload

@ -125,14 +115,14 @@ data ActionPayload =
         queryResult :: QueryResponse
     }
     | JoinResponsePayload {
         joinSuccessors :: [NodeID]
       , joinPredecessors :: [NodeID]
       , joinCache :: [RemoteCacheEntry]
     }
     | LeaveResponsePayload
     | StabiliseResponsePayload {
         stabiliseSuccessors :: [NodeID]
       , stabilisePredecessors :: [NodeID]
     }
     | PingResponsePayload {
         pingNodeStates :: [NodeState]

@ -170,7 +160,7 @@ addCacheEntry :: RemoteCacheEntry -- ^ a remote cache entry received from netw
               -> IO NodeCache -- ^ new node cache with the element inserted
 addCacheEntry entry cache = do
     now <- getPOSIXTime
-    return $ addCacheEntryPure now entry cache
+    pure $ addCacheEntryPure now entry cache

 -- | pure version of 'addCacheEntry' with current time explicitly specified as argument
 addCacheEntryPure :: POSIXTime -- ^ current time

@ -196,7 +186,7 @@ deleteCacheEntry :: NodeID -- ^ID of the node to be deleted
 deleteCacheEntry = Map.update modifier
     where
         modifier (ProxyEntry idPointer _) = Just (ProxyEntry idPointer Nothing)
         modifier NodeEntry {} = Nothing

 -- | Mark a cache entry as verified after pinging it, possibly bumping its timestamp.
 markCacheEntryAsVerified :: Maybe POSIXTime -- ^ the (current) timestamp to be

@ -214,9 +204,9 @@ markCacheEntryAsVerified timestamp = Map.adjust adjustFunc
 attempts :: Int -- ^ number of retries *i*
          -> IO (Maybe a) -- ^ action to retry
          -> IO (Maybe a) -- ^ result after at most *i* retries
-attempts 0 _ = return Nothing
+attempts 0 _ = pure Nothing
 attempts i action = do
     actionResult <- action
     case actionResult of
         Nothing -> attempts (i-1) action
-        Just res -> return $ Just res
+        Just res -> pure $ Just res
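The attempts helper in the last hunk retries an IO (Maybe a) action up to i times and is only touched here by the return to pure rewrite. A hedged usage sketch, assuming attempts is in scope (it is defined in the hunk above; whether the module exports it is not visible in this comparison), with pingNode as an invented placeholder:

    -- Hypothetical usage of the retry combinator shown above.
    pingWithRetries :: IO (Maybe String)
    pingWithRetries = attempts 3 pingNode
      where
        -- stand-in for a network action that may fail with Nothing
        pingNode :: IO (Maybe String)
        pingNode = pure Nothing  -- with this stub, the overall result is Nothing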
@ -1,4 +1,6 @@
-{-# LANGUAGE GeneralizedNewtypeDeriving, DataKinds, OverloadedStrings #-}
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
 {- |
 Module : FediChord
 Description : An opinionated implementation of the EpiChord DHT by Leong et al.
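Splitting the pragma list into one LANGUAGE pragma per line is a pure formatting change; GeneralizedNewtypeDeriving stays enabled, and the new .hlint.yaml only warns about Use DerivingStrategies rather than requiring it. For reference, following that warning would look roughly like the sketch below; the newtype shape and the instance list are assumptions for illustration, not the module's actual definition of NodeID:

    {-# LANGUAGE DerivingStrategies         #-}
    {-# LANGUAGE GeneralizedNewtypeDeriving #-}

    -- Assumed shape, for illustration only.
    newtype NodeID = NodeID { getNodeID :: Integer }
        deriving stock   (Show)     -- derived from the datatype structure
        deriving newtype (Eq, Ord)  -- reuses the underlying Integer instances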
@ -45,29 +47,30 @@ module Hash2Pub.FediChord (
     , cacheWriter
     ) where

-import qualified Data.Map.Strict as Map
-import Network.Socket
-import Data.Time.Clock.POSIX
-import Control.Exception
-import Data.Maybe (isJust, fromMaybe, mapMaybe)
+import Control.Exception
+import qualified Data.Map.Strict as Map
+import Data.Maybe (fromMaybe, isJust, mapMaybe)
+import Data.Time.Clock.POSIX
+import Network.Socket

 -- for hashing and ID conversion
-import Crypto.Hash
-import Data.Word
-import qualified Data.ByteString as BS
-import qualified Data.ByteString.UTF8 as BSU
-import qualified Data.ByteArray as BA
-import qualified Network.ByteOrder as NetworkBytes
-import Data.IP (IPv6, fromHostAddress6, toHostAddress6)
-import Data.IORef
-import Control.Concurrent.STM
-import Control.Concurrent.STM.TQueue
-import Control.Monad (forever)
-import Data.Typeable (Typeable(..), typeOf)
+import Control.Concurrent.STM
+import Control.Concurrent.STM.TQueue
+import Control.Monad (forever)
+import Crypto.Hash
+import qualified Data.ByteArray as BA
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.UTF8 as BSU
+import Data.IORef
+import Data.IP (IPv6, fromHostAddress6,
+                toHostAddress6)
+import Data.Typeable (Typeable (..), typeOf)
+import Data.Word
+import qualified Network.ByteOrder as NetworkBytes

 import Hash2Pub.Utils

 import Debug.Trace (trace)

 -- define protocol constants
 -- | static definition of ID length in bits

@ -120,14 +123,14 @@ a `localCompare` b

 -- | represents a node and all its important state
 data NodeState = NodeState {
     nid :: NodeID
   , domain :: String
   -- ^ full public domain name the node is reachable under
   , ipAddr :: HostAddress6
   -- the node's public IPv6 address
   , dhtPort :: PortNumber
   -- ^ port of the DHT itself
   , apPort :: Maybe PortNumber
   -- ^ port of the ActivityPub relay and storage service
   -- might have to be queried first
   , vServerID :: Integer

@ -142,32 +145,32 @@ data NodeState = NodeState {

 -- | encapsulates all data and parameters that are not present for remote nodes
 data InternalNodeState = InternalNodeState {
     nodeCache :: IORef NodeCache
   -- ^ EpiChord node cache with expiry times for nodes
   -- as the map is ordered, lookups for the closes preceding node can be done using @lookupLT@.
   -- encapsulated into an IORef for allowing concurrent reads without locking
   , cacheWriteQueue :: TQueue (NodeCache -> NodeCache)
   -- ^ cache updates are not written directly to the 'nodeCache' but queued and
   -- only processed by a single writer thread to prevent lost updates.
   -- All nodeCache modifying functions have to be partially applied enough before
   -- being put into the queue.
   --
   , successors :: [NodeID] -- could be a set instead as these are ordered as well
   -- ^ successor nodes in ascending order by distance
   , predecessors :: [NodeID]
   -- ^ predecessor nodes in ascending order by distance
   ----- protocol parameters -----
   -- TODO: evaluate moving these somewhere else
   , kNeighbours :: Int
   -- ^ desired length of predecessor and successor list
   -- needs to be parameterisable for simulation purposes
   , lNumBestNodes :: Int
   -- ^ number of best next hops to provide
   -- needs to be parameterisable for simulation purposes
   , pNumParallelQueries :: Int
   -- ^ number of parallel sent queries
   -- needs to be parameterisable for simulation purposes
   , jEntriesPerSlice :: Int
   -- ^ number of desired entries per cache slice
   -- needs to be parameterisable for simulation purposes
   } deriving (Show, Eq)

@ -248,10 +251,10 @@ data ProxyDirection = Backwards | Forwards deriving (Show, Eq)

 instance Enum ProxyDirection where
     toEnum (-1) = Backwards
     toEnum 1 = Forwards
     toEnum _ = error "no such ProxyDirection"
     fromEnum Backwards = - 1
     fromEnum Forwards = 1

 --- useful function for getting entries for a full cache transfer
 cacheEntries :: NodeCache -> [CacheEntry]

@ -272,7 +275,7 @@ cacheLookup :: NodeID -- ^lookup key
             -> Maybe CacheEntry
 cacheLookup key cache = case Map.lookup key cache of
     Just (ProxyEntry _ result) -> result
     res -> res

 -- | a wrapper around lookup functions, making the lookup redirectable by a @ProxyEntry@
 -- to simulate a modular ring

@ -280,12 +283,12 @@ lookupWrapper :: (NodeID -> NodeCache -> Maybe (NodeID, CacheEntry)) -> (NodeID
 lookupWrapper f fRepeat direction key cache =
     case f key cache of
         -- the proxy entry found holds a
-        Just (_, (ProxyEntry _ (Just entry@NodeEntry{}))) -> Just entry
+        Just (_, ProxyEntry _ (Just entry@NodeEntry{})) -> Just entry
         -- proxy entry holds another proxy entry, this should not happen
-        Just (_, (ProxyEntry _ (Just (ProxyEntry _ _)))) -> Nothing
+        Just (_, ProxyEntry _ (Just (ProxyEntry _ _))) -> Nothing
         -- proxy entry without own entry is a pointer on where to continue
         -- if lookup direction is the same as pointer direction: follow pointer
-        Just (foundKey, (ProxyEntry (pointerID, pointerDirection) Nothing)) ->
+        Just (foundKey, ProxyEntry (pointerID, pointerDirection) Nothing) ->
             let newKey = if pointerDirection == direction
                             then pointerID
                             else foundKey + (fromInteger . toInteger . fromEnum $ direction)

@ -322,17 +325,17 @@ cacheLookupPred = lookupWrapper Map.lookupLT Map.lookupLE Backwards
 cacheGetNodeStateUnvalidated :: CacheEntry -> NodeState
 cacheGetNodeStateUnvalidated (NodeEntry _ nState _) = nState
 cacheGetNodeStateUnvalidated (ProxyEntry _ (Just entry)) = cacheGetNodeStateUnvalidated entry
-cacheGetNodeStateUnvalidated _ = error "trying to return empty node state, please report a bug"
+cacheGetNodeStateUnvalidated _ = error "trying to pure empty node state, please report a bug"

 -- | converts a 'HostAddress6' IP address to a big-endian strict ByteString
 ipAddrAsBS :: HostAddress6 -> BS.ByteString
-ipAddrAsBS (a, b, c, d) = mconcat $ map NetworkBytes.bytestring32 [a, b, c, d]
+ipAddrAsBS (a, b, c, d) = mconcat $ fmap NetworkBytes.bytestring32 [a, b, c, d]

 -- | converts a ByteString in big endian order to an IPv6 address 'HostAddress6'
 bsAsIpAddr :: BS.ByteString -> HostAddress6
 bsAsIpAddr bytes = (a,b,c,d)
     where
-        a:b:c:d:_ = map NetworkBytes.word32 . chunkBytes 4 $ bytes
+        a:b:c:d:_ = fmap NetworkBytes.word32 . chunkBytes 4 $ bytes

 -- | generates a 256 bit long NodeID using SHAKE128, represented as ByteString

@ -344,7 +347,7 @@ genNodeIDBS ip nodeDomain vserver =
     hashIpaddrUpper `BS.append` hashID nodeDomain' `BS.append` hashIpaddLower
     where
         vsBS = BS.pack [vserver] -- attention: only works for vserver IDs up to 255
-        ipaddrNet = (BS.take 8 $ ipAddrAsBS ip) `BS.append` vsBS
+        ipaddrNet = BS.take 8 (ipAddrAsBS ip) `BS.append` vsBS
         nodeDomain' = BSU.fromString nodeDomain `BS.append` vsBS
         hashID bstr = BS.pack . BA.unpack $ (hash bstr :: Digest (SHAKE128 128))
         (hashIpaddrUpper, hashIpaddLower) = BS.splitAt 64 $ hashID ipaddrNet

@ -379,7 +382,7 @@ byteStringToUInteger bs = sum $ parsedBytes 0 bs
             Just (bs', w) -> parseWithOffset offset w : parsedBytes (offset+1) bs'

         parseWithOffset :: Integer -> Word8 -> Integer
         parseWithOffset 0 word = toInteger word -- a shift of 0 is always 0
         parseWithOffset offset word = toInteger word * 2^(8 * offset)

@ -391,7 +394,7 @@ byteStringToUInteger bs = sum $ parsedBytes 0 bs
 --checkCacheSlices :: NodeState -> IO [()]
 --checkCacheSlices state = case getNodeCache state of
 --    -- don't do anything on nodes without a cache
---    Nothing -> return [()]
+--    Nothing -> pure [()]
 --    Just cache' -> checkSlice jEntries (nid state) startBound lastSucc =<< readIORef cache'
 --    -- TODO: do the same for predecessors
 --    where

@ -402,7 +405,7 @@ byteStringToUInteger bs = sum $ parsedBytes 0 bs
 --        checkSlice _ _ _ Nothing _ = []
 --        checkSlice j ownID upperBound (Just lastSuccNode) cache
 --            | upperBound < lastSuccNode = []
 --            | otherwise =
 --                -- continuously half the DHT namespace, take the upper part as a slice,
 --                -- check for existing entries in that slice and create a lookup action
 --                -- and recursively do this on the lower half.

@ -415,10 +418,10 @@ byteStringToUInteger bs = sum $ parsedBytes 0 bs
 --                -- TODO: replace empty IO actions with actual lookups to middle of slice
 --                -- TODO: validate ID before adding to cache
 --                case Map.lookupLT upperBound cache of
---                    Nothing -> return () : checkSlice j ownID lowerBound (Just lastSuccNode) cache
+--                    Nothing -> pure () : checkSlice j ownID lowerBound (Just lastSuccNode) cache
 --                    Just (matchID, _) ->
 --                        if
---                            matchID <= lowerBound then return () : checkSlice j ownID lowerBound (Just lastSuccNode) cache
+--                            matchID <= lowerBound then pure () : checkSlice j ownID lowerBound (Just lastSuccNode) cache
 --                        else
 --                            checkSlice j ownID lowerBound (Just lastSuccNode) cache

@ -428,8 +431,8 @@ byteStringToUInteger bs = sum $ parsedBytes 0 bs

 -- | configuration values used for initialising the FediChord DHT
 data FediChordConf = FediChordConf {
     confDomain :: String
   , confIP :: HostAddress6
   , confDhtPort :: Int
   } deriving (Show, Eq)

@ -439,7 +442,7 @@ fediChordInit :: FediChordConf -> IO (Socket, NodeState)
 fediChordInit conf = do
     initialState <- nodeStateInit conf
     serverSock <- mkServerSocket (ipAddr initialState) (dhtPort initialState)
-    return (serverSock, initialState)
+    pure (serverSock, initialState)

 -- | initialises the 'NodeState' for this local node.
 -- Separated from 'fediChordInit' to be usable in tests.

@ -467,7 +470,7 @@ nodeStateInit conf = do
       , pNumParallelQueries = 2
       , jEntriesPerSlice = 2
     }
-    return initialState
+    pure initialState

 --fediChordJoin :: NodeState -- ^ the local 'NodeState'
 --              -> (String, PortNumber) -- ^ domain and port of a bootstrapping node

@ -486,13 +489,13 @@ cacheWriter :: NodeState -> IO ()
 cacheWriter ns = do
     let writeQueue' = getCacheWriteQueue ns
     case writeQueue' of
-        Nothing -> return ()
+        Nothing -> pure ()
         Just writeQueue -> forever $ do
             f <- atomically $ readTQueue writeQueue
             let
                 refModifier :: NodeCache -> (NodeCache, ())
                 refModifier nc = (f nc, ())
-            maybe (return ()) (
+            maybe (pure ()) (
                 \ref -> atomicModifyIORef' ref refModifier
                 ) $ getNodeCacheRef ns

@ -518,7 +521,7 @@ mkServerSocket ip port = do
     sock <- socket AF_INET6 Datagram defaultProtocol
     setSocketOption sock IPv6Only 1
     bind sock sockAddr
-    return sock
+    pure sock

 -- | create a UDP datagram socket, connected to a destination.
 -- The socket gets an arbitrary free local port assigned.

@ -529,4 +532,4 @@ mkSendSocket dest destPort = do
     destAddr <- addrAddress <$> resolve (Just dest) (Just destPort)
     sendSock <- socket AF_INET6 Datagram defaultProtocol
     setSocketOption sendSock IPv6Only 1
-    return sendSock
+    pure sendSock
@ -1,10 +1,10 @@
 module Main where

-import System.Environment
-import Data.IP (IPv6, toHostAddress6) -- iproute, just for IPv6 string parsing
-import Control.Concurrent
+import Control.Concurrent
+import Data.IP (IPv6, toHostAddress6)
+import System.Environment

 import Hash2Pub.FediChord

 main :: IO ()
 main = do

@ -20,12 +20,12 @@ main = do
     -- idea: list of bootstrapping nodes, try joining within a timeout
     -- stop main thread from terminating during development
     getChar
-    return ()
+    pure ()

 readConfig :: IO FediChordConf
 readConfig = do
     confDomainString : ipString : portString : _ <- getArgs
-    return $ FediChordConf {
+    pure $ FediChordConf {
         confDomain = confDomainString
       , confIP = toHostAddress6 . read $ ipString
       , confDhtPort = read portString
@ -2,11 +2,11 @@
 module Hash2Pub.Utils where

 import qualified Data.ByteString as BS
 import qualified Data.Set as Set

 -- |wraps a list into a Maybe, by replacing empty lists with Nothing
 maybeEmpty :: [a] -> Maybe [a]
 maybeEmpty [] = Nothing
 maybeEmpty nonemptyList = Just nonemptyList

 -- | Chop a list into sublists of i elements. The last sublist might contain

@ -15,7 +15,7 @@ chunksOf :: Int -> [a] -> [[a]]
 chunksOf i xs =
     case splitAt i xs of
         (a, []) -> [a]
         (a, b) -> a : chunksOf i b

 -- | Chop a 'BS.ByteString' into list of substrings of i elements. The last

@ -24,7 +24,7 @@ chunkBytes :: Int -> BS.ByteString -> [BS.ByteString]
 chunkBytes i xs =
     case BS.splitAt i xs of
         (a, "") -> [a]
         (a, b) -> a : chunkBytes i b

 -- | Chop a 'Set.Set' into a list of disjuct subsets of i elements. The last
 -- subset might contain less than i elements.
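chunksOf and chunkBytes split their input into pieces of i elements, with a possibly shorter final piece. A small check of the expected behaviour, assuming Hash2Pub.Utils is in scope (sketch, not part of the changeset):

    import Hash2Pub.Utils (chunksOf)

    chunksExample :: Bool
    chunksExample = chunksOf 2 [1 .. 5 :: Int] == [[1, 2], [3, 4], [5]]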
@@ -1,20 +1,20 @@
 {-# LANGUAGE OverloadedStrings #-}
 module Main where

-import qualified Data.ASN1.Encoding as ASN1 -- asn1-encoding package
 import qualified Data.ASN1.BinaryEncoding as ASN1
-import qualified Data.ASN1.Error as ASN1
-import qualified Data.ASN1.Types as ASN1 -- asn1-types package
+import qualified Data.ASN1.Encoding as ASN1
+import qualified Data.ASN1.Error as ASN1
 import qualified Data.ASN1.Parse as ASN1P
-import qualified Data.ByteString as BS
-import Data.Maybe (fromMaybe)
-import Debug.Trace (trace)
+import qualified Data.ASN1.Types as ASN1
+import qualified Data.ByteString as BS
+import Data.Maybe (fromMaybe)
+import Debug.Trace (trace)

 -- import Hash2Pub.Fedichord

 -- encoding values as ASN.1 types is done using Data.ASN1.Prim
 someASN1 :: [ASN1.ASN1]
-someASN1 = ASN1.Start ASN1.Sequence : ASN1.ASN1String (ASN1.asn1CharacterString ASN1.Visible domain) : ASN1.ASN1String (ASN1.asn1CharacterString ASN1.UTF8 unicode) : map ASN1.IntVal xs ++ [ASN1.End ASN1.Sequence]
+someASN1 = ASN1.Start ASN1.Sequence : ASN1.ASN1String (ASN1.asn1CharacterString ASN1.Visible domain) : ASN1.ASN1String (ASN1.asn1CharacterString ASN1.UTF8 unicode) : fmap ASN1.IntVal xs <> [ASN1.End ASN1.Sequence]
     where
         domain = "domains.are.ascii.on.ly"
         unicode = "Hähä, but unicode string!"

@@ -27,12 +27,12 @@ derToAsn1 :: BS.ByteString -> Either ASN1.ASN1Error [ASN1.ASN1]
 derToAsn1 = ASN1.decodeASN1' ASN1.DER

 getUnicodeField :: [ASN1.ASN1] -> String
-getUnicodeField ((ASN1.Start ASN1.Sequence) : _ : (ASN1.ASN1String strASN1) : _) = fromMaybe "" $ ASN1.asn1CharacterToString strASN1
+getUnicodeField (ASN1.Start ASN1.Sequence : _ : ASN1.ASN1String strASN1 : _) = fromMaybe "" $ ASN1.asn1CharacterToString strASN1

 testParser :: ASN1P.ParseASN1 String
 testParser = do
     foo <- ASN1P.onNextContainer ASN1.Sequence getAll
-    return $ show foo
+    pure $ show foo

 getAll :: ASN1P.ParseASN1 [ASN1.ASN1]
 getAll = ASN1P.getMany ASN1P.getNext
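For readers who want to see how these pieces fit together, here is a round-trip sketch. It assumes the someASN1, getUnicodeField and testParser definitions above (with the qualified ASN1/ASN1P imports) are in scope and only adds the DER encode/decode plumbing around them; demoRoundTrip is an illustrative name, not part of the repository:

demoRoundTrip :: IO ()
demoRoundTrip = do
    -- DER-encode the ASN.1 value stream to a strict ByteString …
    let encoded = ASN1.encodeASN1' ASN1.DER someASN1
    -- … and decode it back; decoding may fail, hence the Either
    case ASN1.decodeASN1' ASN1.DER encoded of
        Left err      -> putStrLn $ "decoding failed: " <> show err
        Right decoded -> do
            -- direct pattern match on the decoded stream
            putStrLn $ getUnicodeField decoded
            -- or interpret it with the ParseASN1 parser
            print $ ASN1P.runParseASN1 testParser decoded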
32 stylish.sh Executable file
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+function ls-source-files {
+    git ls-files "app/*.hs" "src/*.hs" "test/*.hs"
+}
+
+function check-git-status {
+    [ "$(git status -s '*.hs' | wc -l)" == "0" ]
+}
+
+function stylish {
+    stylish-haskell -i $(ls-source-files)
+}
+
+if check-git-status
+then
+    echo "Running stylish-haskell..."
+    stylish
+    echo "Done."
+    if check-git-status
+    then
+        echo "OK, impeccable style."
+    else
+        echo "KO! Lack of style on those files:"
+        git status -sb
+        exit 1
+    fi
+else
+    echo "git status not clean, aborting"
+fi
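As a design note: the script acts as a formatting gate. It refuses to do anything while there are uncommitted changes to *.hs files, applies stylish-haskell in place to the tracked sources under app/, src/ and test/, and then exits non-zero if the formatter modified anything, which makes it usable both locally and, presumably, as a pre-commit or CI check.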
@@ -1,20 +1,20 @@
 {-# LANGUAGE OverloadedStrings #-}
 module FediChordSpec where

-import Test.Hspec
-import Control.Exception
-import Network.Socket
-import Data.Maybe (fromJust)
+import Control.Exception
+import Data.ASN1.Parse (runParseASN1)
+import qualified Data.ByteString as BS
+import Data.IORef
 import qualified Data.Map.Strict as Map
-import qualified Data.ByteString as BS
+import Data.Maybe (fromJust)
 import qualified Data.Set as Set
-import Data.ASN1.Parse (runParseASN1)
-import Data.Time.Clock.POSIX
-import Data.IORef
+import Data.Time.Clock.POSIX
+import Network.Socket
+import Test.Hspec

-import Hash2Pub.FediChord
+import Hash2Pub.ASN1Coding
 import Hash2Pub.DHTProtocol
-import Hash2Pub.ASN1Coding
+import Hash2Pub.FediChord

 spec :: Spec
 spec = do
@@ -96,19 +96,19 @@ spec = do
             -- ignore empty proxy elements in initial cache
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupPred (exampleID + 10) emptyCache `shouldBe` Nothing
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupSucc exampleID emptyCache `shouldBe` Nothing

             -- given situation: 0 < anotherNode < nid exampleLocalNode < maxBound
             -- first try non-modular queries between the 2 stored nodes
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupPred (exampleID + 10) newCache `shouldBe` Just exampleID
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupSucc exampleID newCache `shouldBe` Just exampleID
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupSucc (exampleID + 10) newCache `shouldBe` Just anotherID
             -- queries that require a (pseudo)modular structure
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupPred (exampleID - 2) newCache `shouldBe` Just anotherID
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupSucc (anotherID + 2) newCache `shouldBe` Just exampleID
             -- now store a node in one of the ProxyEntries
             let cacheWithProxyNodeEntry = addCacheEntryPure 10 (RemoteCacheEntry maxNode 10) newCache
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupPred (exampleID - 2) cacheWithProxyNodeEntry `shouldBe` Just maxBound
             nid . cacheGetNodeStateUnvalidated <$> cacheLookupSucc (anotherID + 2) cacheWithProxyNodeEntry `shouldBe` Just maxBound
         it "entries can be deleted" $ do
             let
                 nC = addCacheEntryPure 10 (RemoteCacheEntry maxNode 10) newCache
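The "(pseudo)modular" expectations above describe lookups on a ring of NodeIDs: when no strictly larger (or smaller) key exists, the search wraps around to the other end of the key space; the repository's NodeCache handles this via ProxyEntries, as the test comments indicate. As a purely illustrative, self-contained sketch — not the repository's cacheLookupPred/cacheLookupSucc, and glossing over their exact inclusive/exclusive bounds — the wrap-around idea on a plain Data.Map looks like this:

import qualified Data.Map.Strict as Map

-- successor lookup with wrap-around: smallest key >= k, else the minimum
lookupSuccWrap :: Integer -> Map.Map Integer v -> Maybe (Integer, v)
lookupSuccWrap k m
    | Map.null m = Nothing
    | otherwise  = case Map.lookupGE k m of
        Just hit -> Just hit
        Nothing  -> Just (Map.findMin m)  -- wrap past the highest key

-- predecessor lookup with wrap-around: largest key < k, else the maximum
lookupPredWrap :: Integer -> Map.Map Integer v -> Maybe (Integer, v)
lookupPredWrap k m
    | Map.null m = Nothing
    | otherwise  = case Map.lookupLT k m of
        Just hit -> Just hit
        Nothing  -> Just (Map.findMax m)  -- wrap below the lowest key

-- e.g. lookupSuccWrap 7 (Map.fromList [(2,"a"),(5,"b")]) == Just (2,"a")
--      lookupPredWrap 1 (Map.fromList [(2,"a"),(5,"b")]) == Just (5,"b")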
@@ -123,7 +123,7 @@ spec = do
             nid1 = toNodeID 2^(23::Integer)+1
             node1 = do
                 eln <- exampleLocalNode -- is at 2^23.00000017198264 = 8388609
-                return $ putPredecessors [nid4] $ eln {nid = nid1}
+                pure $ putPredecessors [nid4] $ eln {nid = nid1}
             nid2 = toNodeID 2^(230::Integer)+12
             node2 = exampleNodeState { nid = nid2}
             nid3 = toNodeID 2^(25::Integer)+10
@@ -156,7 +156,7 @@ spec = do
     describe "Messages can be encoded to and decoded from ASN.1" $ do
         -- define test messages
         let
-            someNodeIDs = map fromInteger [3..12]
+            someNodeIDs = fmap fromInteger [3..12]
             qidReqPayload = QueryIDRequestPayload {
                   queryTargetID = nid exampleNodeState
                 , queryLBestNodes = 3
@@ -1,8 +1,8 @@
 module Main (main) where

-import Test.Hspec
 import qualified FediChordSpec
+import Test.Hspec

 main :: IO ()
-main = hspec $ do
+main = hspec $
     describe "FediChord tests" FediChordSpec.spec