Compare commits

...

16 Commits

Author SHA1 Message Date
10b44cdf72 add of changes 2025-07-07 22:07:02 +01:00 (all checks successful: mirror (push) in 5s)
4cc9e8b2d2 add of gsm settings 2025-07-07 21:53:01 +01:00 (all checks successful: mirror (push) in 4s)
1d5eae7d80 add of settings 2025-07-07 20:42:17 +01:00
d3d14919a8 add fix for samples 2025-07-07 14:05:07 +01:00 (all checks successful: mirror (push) in 4s)
8b6ba00d8c add of changes 2025-07-07 12:00:29 +01:00 (all checks successful: mirror (push) in 4s)
c4610fbcb9 add 2025-07-07 11:03:22 +01:00 (all checks successful: mirror (push) in 4s)
STCB 96553b27bd Merge remote-tracking branch 'origin/ProtcoleImplement' into ProtcoleImplement 2025-07-07 00:02:39 +02:00 (all checks successful: mirror (push) in 4s)
STCB 4832ba751f Cleaning a bit more 2025-07-07 00:00:05 +02:00
a6cd9632ee Cleaning a bit 2025-07-06 23:36:41 +02:00 (all checks successful: mirror (push) in 5s)
6b517f6a46 add playback 2025-07-06 17:13:53 +01:00 (all checks successful: mirror (push) in 5s)
a14084ce68 add 2025-07-04 23:03:14 +01:00 (all checks successful: mirror (push) in 5s)
5c274817df add 2025-07-04 23:01:46 +01:00 (some checks failed: mirror (push) failed after 5s, build-stealth (push) failed after 5m0s, build (push) failed after 5m2s)
8f81049822 add of drybox 2025-07-04 22:57:36 +01:00 (some checks failed: mirror (push) failed after 3s)
75f54dc90a add protocole into drybox 2025-06-15 11:59:27 +01:00 (some checks failed: mirror (push) failed after 4s)
0badc8862c feat: noise java lib | WIP NoiseHandler IK handshake 2025-06-14 11:29:22 +03:00 (some checks failed: mirror (push) failed after 4s, build-stealth (push) successful in 10m8s, build (push) successful in 10m12s)
9ef3ad5b56 feat: ED25519 keypair instead of P256 2025-06-10 16:25:31 +03:00 (some checks failed: build (push) successful in 10m0s, build-stealth (push) successful in 10m3s, mirror (push) failed after 5s)
56 changed files with 9064 additions and 741 deletions

View File

@ -24,7 +24,7 @@ android {
applicationId = "com.icing.dialer"
// You can update the following values to match your application needs.
// For more information, see: https://flutter.dev/to/review-gradle-config.
minSdk = flutter.minSdkVersion
minSdk = 23
targetSdk = flutter.targetSdkVersion
versionCode = flutter.versionCode
versionName = flutter.versionName
@ -42,3 +42,19 @@ android {
flutter {
source = "../.."
}
dependencies {
implementation files('libs/noise-java-1.0.jar')
// Audio processing and DSP
implementation 'com.github.wendykierp:JTransforms:3.1'
// Apache Commons Math for signal processing
implementation 'org.apache.commons:commons-math3:3.6.1'
// Audio codec - Opus for Android
implementation 'com.github.theeasiestway:android-opus-codec:1.0.3'
// Kotlin Coroutines for async processing
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3'
}
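The repository configuration is not part of this diff. As an assumption for context: JTransforms and commons-math3 resolve from Maven Central, while the com.github.theeasiestway:android-opus-codec artifact is normally served by JitPack, so a repository block along these lines would be expected (hypothetical sketch, Kotlin DSL):

```kotlin
// Hypothetical settings.gradle.kts repository configuration assumed by the
// dependencies above; the actual project setup is not shown in this changeset.
dependencyResolutionManagement {
    repositories {
        google()
        mavenCentral()
        maven { url = uri("https://jitpack.io") } // hosts android-opus-codec
    }
}
```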

Binary file not shown.

View File

@ -3,9 +3,15 @@
<uses-permission android:name="android.permission.WRITE_CONTACTS"/>
<uses-permission android:name="android.permission.CALL_PHONE" />
<uses-permission android:name="android.permission.SEND_SMS" />
<uses-permission android:name="android.permission.READ_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.WRITE_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.READ_CALL_LOG"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
<uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
<uses-permission android:name="android.permission.ANSWER_PHONE_CALLS" />
<uses-permission android:name="android.permission.PROCESS_OUTGOING_CALLS" />
<uses-permission android:name="android.permission.POST_NOTIFICATIONS" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.telephony" android:required="true" />
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.

View File

@ -5,8 +5,6 @@
<uses-permission android:name="android.permission.WRITE_CONTACTS"/>
<uses-permission android:name="android.permission.CALL_PHONE" />
<uses-permission android:name="android.permission.SEND_SMS" />
<uses-permission android:name="android.permission.READ_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.WRITE_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.READ_CALL_LOG"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
<uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
@ -73,7 +71,7 @@
android:name="android.telecom.IN_CALL_SERVICE_UI"
android:value="true" />
</service>
<!-- Custom ConnextionService, will be needed at some point when we implement our own protocol -->
<!-- Custom ConnectionService, will be needed when we implement our own protocol -->
<!-- <service
android:name=".services.CallConnectionService"
android:permission="android.permission.BIND_TELECOM_CONNECTION_SERVICE"

View File

@ -1,28 +0,0 @@
package com.icing.dialer
import java.security.KeyStore
object KeyDeleterHelper {
private const val ANDROID_KEYSTORE = "AndroidKeyStore"
/**
* Deletes the key pair associated with the given alias from the Android Keystore.
*
* @param alias The alias of the key pair to delete.
* @throws Exception if deletion fails.
*/
fun deleteKeyPair(alias: String) {
try {
val keyStore = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) }
if (!keyStore.containsAlias(alias)) {
throw Exception("No key found with alias \"$alias\" to delete.")
}
keyStore.deleteEntry(alias)
} catch (e: Exception) {
throw Exception("Failed to delete key pair: ${e.message}", e)
}
}
}

View File

@ -1,47 +0,0 @@
package com.icing.dialer
import android.security.keystore.KeyGenParameterSpec
import android.security.keystore.KeyProperties
import java.security.KeyPairGenerator
import java.security.KeyStore
object KeyGeneratorHelper {
private const val ANDROID_KEYSTORE = "AndroidKeyStore"
/**
* Generates an ECDSA P-256 key pair and stores it in the Android Keystore.
*
* @param alias Unique identifier for the key pair.
* @throws Exception if key generation fails.
*/
fun generateECKeyPair(alias: String) {
try {
val keyStore = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) }
// Check if the key already exists
if (keyStore.containsAlias(alias)) {
throw Exception("Key with alias \"$alias\" already exists.")
}
val keyPairGenerator = KeyPairGenerator.getInstance(
KeyProperties.KEY_ALGORITHM_EC,
ANDROID_KEYSTORE
)
val parameterSpec = KeyGenParameterSpec.Builder(
alias,
KeyProperties.PURPOSE_SIGN or KeyProperties.PURPOSE_VERIFY
)
.setAlgorithmParameterSpec(java.security.spec.ECGenParameterSpec("secp256r1"))
.setDigests(KeyProperties.DIGEST_SHA256, KeyProperties.DIGEST_SHA384, KeyProperties.DIGEST_SHA512)
.setUserAuthenticationRequired(false) // Set to true if you require user authentication
.build()
keyPairGenerator.initialize(parameterSpec)
keyPairGenerator.generateKeyPair()
} catch (e: Exception) {
throw Exception("Failed to generate EC key pair: ${e.message}", e)
}
}
}

View File

@ -1,6 +1,6 @@
package com.icing.dialer
import java.security.PrivateKey
import android.os.Build
import android.security.keystore.KeyGenParameterSpec
import android.security.keystore.KeyProperties
import android.util.Base64
@ -8,15 +8,21 @@ import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import java.security.KeyPairGenerator
import java.security.KeyStore
import java.security.PrivateKey
import java.security.Signature
import java.security.spec.ECGenParameterSpec
class KeystoreHelper(private val call: MethodCall, private val result: MethodChannel.Result) {
private val ANDROID_KEYSTORE = "AndroidKeyStore"
fun handleMethodCall() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
result.error("UNSUPPORTED_API", "ED25519 requires Android 11 (API 30) or higher", null)
return
}
when (call.method) {
"generateKeyPair" -> generateECKeyPair()
"generateKeyPair" -> generateEDKeyPair()
"signData" -> signData()
"getPublicKey" -> getPublicKey()
"deleteKeyPair" -> deleteKeyPair()
@ -25,7 +31,7 @@ class KeystoreHelper(private val call: MethodCall, private val result: MethodCha
}
}
private fun generateECKeyPair() {
private fun generateEDKeyPair() {
val alias = call.argument<String>("alias")
if (alias == null) {
result.error("INVALID_ARGUMENT", "Alias is required", null)
@ -44,16 +50,14 @@ class KeystoreHelper(private val call: MethodCall, private val result: MethodCha
KeyProperties.KEY_ALGORITHM_EC,
ANDROID_KEYSTORE
)
val parameterSpec = KeyGenParameterSpec.Builder(
alias,
KeyProperties.PURPOSE_SIGN or KeyProperties.PURPOSE_VERIFY
)
.setAlgorithmParameterSpec(java.security.spec.ECGenParameterSpec("secp256r1"))
.setDigests(KeyProperties.DIGEST_SHA256, KeyProperties.DIGEST_SHA384, KeyProperties.DIGEST_SHA512)
.setAlgorithmParameterSpec(ECGenParameterSpec("ed25519"))
.setDigests(KeyProperties.DIGEST_SHA256)
.setUserAuthenticationRequired(false)
.build()
keyPairGenerator.initialize(parameterSpec)
keyPairGenerator.generateKeyPair()
@ -73,17 +77,14 @@ class KeystoreHelper(private val call: MethodCall, private val result: MethodCha
try {
val keyStore = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) }
val privateKey = keyStore.getKey(alias, null) as? PrivateKey ?: run {
result.error("KEY_NOT_FOUND", "Private key not found for alias \"$alias\".", null)
return
}
val signature = Signature.getInstance("SHA256withECDSA")
val signature = Signature.getInstance("Ed25519")
signature.initSign(privateKey)
signature.update(data.toByteArray())
val signedBytes = signature.sign()
val signatureBase64 = Base64.encodeToString(signedBytes, Base64.DEFAULT)
result.success(signatureBase64)
} catch (e: Exception) {
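For reference, a minimal sketch of the verification counterpart to the Ed25519 signing path in this file. `verifySignature` is hypothetical and not part of this changeset; it reuses the same Keystore certificate lookup the removed PublicKeyHelper performed, and assumes the provider behind `Signature.getInstance("Ed25519")` also supports verification.

```kotlin
import android.util.Base64
import java.security.KeyStore
import java.security.Signature

// Hypothetical helper: verify a Base64 signature produced by signData() above.
// Assumes the alias has a certificate entry in the Android Keystore.
fun verifySignature(alias: String, data: ByteArray, signatureBase64: String): Boolean {
    val keyStore = KeyStore.getInstance("AndroidKeyStore").apply { load(null) }
    val publicKey = keyStore.getCertificate(alias)?.publicKey
        ?: throw IllegalStateException("No certificate found for alias \"$alias\"")
    return Signature.getInstance("Ed25519").run {
        initVerify(publicKey)
        update(data)
        verify(Base64.decode(signatureBase64, Base64.DEFAULT))
    }
}
```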

View File

@ -0,0 +1,165 @@
package com.icing.dialer
import android.util.Base64
import com.southernstorm.noise.protocol.CipherState
import com.southernstorm.noise.protocol.CipherStatePair
import com.southernstorm.noise.protocol.HandshakeState
import com.southernstorm.noise.protocol.Noise
import javax.crypto.BadPaddingException
import javax.crypto.ShortBufferException
import java.security.NoSuchAlgorithmException
import java.util.Arrays
class NoiseHandler(
private val localKeyBase64: String, // ED25519 private (initiator) or public (responder) key (Base64-encoded)
private val remotePublicKeyBase64: String // Remote ED25519 public key (Base64-encoded)
) {
private var handshakeState: HandshakeState? = null
private var cipherStatePair: CipherStatePair? = null
/**
* Wipes sensitive data by filling the byte array with zeros.
*/
private fun wipe(data: ByteArray?) {
data?.let { Arrays.fill(it, 0.toByte()) }
}
/**
* Initializes the Noise handshake.
* @param isInitiator True if this is the initiator, false if responder.
* @return The initial handshake message.
* @throws IllegalArgumentException If keys are invalid.
* @throws IllegalStateException If handshake fails to start.
*/
fun initialize(isInitiator: Boolean): ByteArray {
var localKey: ByteArray? = null
var remotePublicKey: ByteArray? = null
try {
val protocolName = "Noise_IK_25519_AESGCM_SHA256"
handshakeState = HandshakeState(
protocolName,
if (isInitiator) HandshakeState.INITIATOR else HandshakeState.RESPONDER
)
// Set local key (private for initiator, public for responder)
localKey = Base64.decode(localKeyBase64, Base64.DEFAULT)
if (localKey.size != 32) {
throw IllegalArgumentException("Invalid local key size: ${localKey.size}")
}
if (isInitiator) {
handshakeState?.localKeyPair?.setPrivateKey(localKey, 0)
?: throw IllegalStateException("Local key pair not initialized")
} else {
handshakeState?.localKeyPair?.setPublicKey(localKey, 0)
?: throw IllegalStateException("Local key pair not initialized")
}
// Set remote public key
remotePublicKey = Base64.decode(remotePublicKeyBase64, Base64.DEFAULT)
if (remotePublicKey.size != 32) {
throw IllegalArgumentException("Invalid remote public key size: ${remotePublicKey.size}")
}
handshakeState?.remotePublicKey?.setPublicKey(remotePublicKey, 0)
?: throw IllegalStateException("Remote public key not initialized")
// Start handshake and write initial message
handshakeState?.start() ?: throw IllegalStateException("Handshake state not initialized")
val messageBuffer = ByteArray(256) // Sufficient for IK initial message
val payload = ByteArray(0) // Empty payload
val writtenLength: Int = handshakeState?.writeMessage(
messageBuffer, 0, payload, 0, payload.size
) ?: throw IllegalStateException("Failed to write handshake message")
return messageBuffer.copyOf(writtenLength)
} catch (e: NoSuchAlgorithmException) {
throw IllegalStateException("Unsupported algorithm: ${e.message}", e)
} catch (e: ShortBufferException) {
throw IllegalStateException("Buffer too small for handshake message", e)
} finally {
wipe(localKey)
wipe(remotePublicKey)
}
}
/**
* Processes a handshake message and returns the next message or null if complete.
* @param message The received handshake message.
* @return The next handshake message or null if handshake is complete.
* @throws IllegalStateException If handshake state is invalid.
* @throws BadPaddingException If message decryption fails.
*/
fun processHandshakeMessage(message: ByteArray): ByteArray? {
try {
val handshake = handshakeState ?: throw IllegalStateException("Handshake not initialized")
val messageBuffer = ByteArray(256) // Sufficient for IK payload + MAC
val writtenLength: Int = handshake.readMessage(
message, 0, message.size, messageBuffer, 0
)
if (handshake.getAction() == HandshakeState.SPLIT) {
cipherStatePair = handshake.split()
return null // Handshake complete
}
// Write next message
val payload = ByteArray(0) // Empty payload
val nextMessage = ByteArray(256)
val nextWrittenLength: Int = handshake.writeMessage(
nextMessage, 0, payload, 0, payload.size
)
return nextMessage.copyOf(nextWrittenLength)
} catch (e: ShortBufferException) {
throw IllegalStateException("Buffer too small for handshake message", e)
} catch (e: BadPaddingException) {
throw IllegalStateException("Invalid handshake message: ${e.message}", e)
}
}
/**
* Encrypts data using the sender's cipher state.
* @param data The data to encrypt.
* @return The encrypted data.
* @throws IllegalStateException If handshake is not completed.
*/
fun encryptData(data: ByteArray): ByteArray {
val cipherState = cipherStatePair?.getSender()
?: throw IllegalStateException("Handshake not completed")
try {
val outputBuffer = ByteArray(data.size + cipherState.getMACLength()) // Account for AES-GCM MAC
val length: Int = cipherState.encryptWithAd(null, data, 0, outputBuffer, 0, data.size)
return outputBuffer.copyOf(length)
} catch (e: ShortBufferException) {
throw IllegalStateException("Buffer too small for encryption: ${e.message}", e)
}
}
/**
* Decrypts data using the receiver's cipher state.
* @param data The encrypted data.
* @return The decrypted data.
* @throws IllegalStateException If handshake is not completed.
* @throws BadPaddingException If decryption fails.
*/
fun decryptData(data: ByteArray): ByteArray {
val cipherState = cipherStatePair?.getReceiver()
?: throw IllegalStateException("Handshake not completed")
try {
val outputBuffer = ByteArray(data.size)
val length: Int = cipherState.decryptWithAd(null, data, 0, outputBuffer, 0, data.size)
return outputBuffer.copyOf(length)
} catch (e: ShortBufferException) {
throw IllegalStateException("Buffer too small for decryption: ${e.message}", e)
} catch (e: BadPaddingException) {
throw IllegalStateException("Invalid ciphertext: ${e.message}", e)
}
}
/**
* Cleans up sensitive cryptographic data.
*/
fun destroy() {
handshakeState?.destroy()
cipherStatePair?.destroy()
handshakeState = null
cipherStatePair = null
}
}
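To show how the class above is meant to be driven: the initiator writes the first IK message, then feeds each reply back into `processHandshakeMessage` until it returns null (handshake split), after which `encryptData`/`decryptData` carry traffic. A minimal sketch from the initiator's side; `localPrivateKeyB64`, `peerPublicKeyB64` and `transport` are hypothetical placeholders, not part of the diff.

```kotlin
// Minimal sketch of driving NoiseHandler as the initiating side.
fun runInitiatorHandshake(
    localPrivateKeyB64: String,
    peerPublicKeyB64: String,
    transport: (ByteArray) -> ByteArray   // sends a message and returns the peer's reply
): NoiseHandler {
    val noise = NoiseHandler(localPrivateKeyB64, peerPublicKeyB64)
    var outbound: ByteArray? = noise.initialize(isInitiator = true)  // first IK message
    while (outbound != null) {
        val reply = transport(outbound)                   // deliver, wait for the response
        outbound = noise.processHandshakeMessage(reply)   // null once the handshake splits
    }
    // Transport keys are derived at this point; encryptData()/decryptData() can be used.
    return noise
}
```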

View File

@ -1,30 +0,0 @@
package com.icing.dialer
import java.security.KeyStore
import java.security.PublicKey
import android.util.Base64
object PublicKeyHelper {
private const val ANDROID_KEYSTORE = "AndroidKeyStore"
/**
* Retrieves the public key associated with the given alias.
*
* @param alias The alias of the key pair.
* @return The public key as a Base64-encoded string.
* @throws Exception if retrieval fails.
*/
fun getPublicKey(alias: String): String {
try {
val keyStore = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) }
val certificate = keyStore.getCertificate(alias) ?: throw Exception("Certificate not found for alias \"$alias\".")
val publicKey: PublicKey = certificate.publicKey
return Base64.encodeToString(publicKey.encoded, Base64.DEFAULT)
} catch (e: Exception) {
throw Exception("Failed to retrieve public key: ${e.message}", e)
}
}
}

View File

@ -1,37 +0,0 @@
package com.icing.dialer
import android.security.keystore.KeyProperties
import java.security.KeyStore
import java.security.Signature
import android.util.Base64
import java.security.PrivateKey
object SignerHelper {
private const val ANDROID_KEYSTORE = "AndroidKeyStore"
/**
* Signs the provided data using the private key associated with the given alias.
*
* @param alias The alias of the key pair.
* @param data The data to sign.
* @return The signature as a Base64-encoded string.
* @throws Exception if signing fails.
*/
fun signData(alias: String, data: ByteArray): String {
try {
val keyStore = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) }
val privateKey = keyStore.getKey(alias, null) as? PrivateKey?: throw Exception("Private key not found for alias \"$alias\".")
val signature = Signature.getInstance("SHA256withECDSA")
signature.initSign(privateKey)
signature.update(data)
val signedBytes = signature.sign()
return Base64.encodeToString(signedBytes, Base64.DEFAULT)
} catch (e: Exception) {
throw Exception("Failed to sign data: ${e.message}", e)
}
}
}

View File

@ -0,0 +1,102 @@
package com.icing.dialer.modem
import com.theeasiestway.opus.Opus
import java.nio.ByteBuffer
import java.nio.ShortBuffer
class AudioCodec {
private var encoder: Long = 0
private var decoder: Long = 0
private val opus = Opus()
init {
// Initialize Opus encoder and decoder
encoder = opus.encoderCreate(
FSKConstants.SAMPLE_RATE,
1, // Mono
Opus.OPUS_APPLICATION_VOIP
)
decoder = opus.decoderCreate(
FSKConstants.SAMPLE_RATE,
1 // Mono
)
// Configure encoder
opus.encoderSetBitrate(encoder, FSKConstants.OPUS_BITRATE)
opus.encoderSetComplexity(encoder, FSKConstants.OPUS_COMPLEXITY)
opus.encoderSetSignal(encoder, Opus.OPUS_SIGNAL_VOICE)
opus.encoderSetPacketLossPerc(encoder, 10) // Expect 10% packet loss
opus.encoderSetInbandFEC(encoder, 1) // Enable FEC
opus.encoderSetDTX(encoder, 1) // Enable discontinuous transmission
}
fun encode(audioData: ShortArray): ByteArray {
val maxEncodedSize = 1024
val encodedData = ByteArray(maxEncodedSize)
val encodedLength = opus.encode(
encoder,
audioData,
FSKConstants.OPUS_FRAME_SIZE,
encodedData
)
return if (encodedLength > 0) {
encodedData.copyOf(encodedLength)
} else {
throw RuntimeException("Opus encoding failed with error: $encodedLength")
}
}
fun decode(encodedData: ByteArray): ShortArray {
val decodedData = ShortArray(FSKConstants.OPUS_FRAME_SIZE)
val decodedSamples = opus.decode(
decoder,
encodedData,
decodedData,
FSKConstants.OPUS_FRAME_SIZE,
0 // No packet loss
)
return if (decodedSamples > 0) {
decodedData.copyOf(decodedSamples)
} else {
throw RuntimeException("Opus decoding failed with error: $decodedSamples")
}
}
fun decodeLost(): ShortArray {
val decodedData = ShortArray(FSKConstants.OPUS_FRAME_SIZE)
val decodedSamples = opus.decode(
decoder,
null,
decodedData,
FSKConstants.OPUS_FRAME_SIZE,
1 // Packet lost
)
return if (decodedSamples > 0) {
decodedData.copyOf(decodedSamples)
} else {
ShortArray(FSKConstants.OPUS_FRAME_SIZE) // Return silence
}
}
fun release() {
if (encoder != 0L) {
opus.encoderDestroy(encoder)
encoder = 0
}
if (decoder != 0L) {
opus.decoderDestroy(decoder)
decoder = 0
}
}
protected fun finalize() {
release()
}
}

View File

@ -0,0 +1,31 @@
package com.icing.dialer.modem
object FSKConstants {
// 4FSK frequency configuration
const val SAMPLE_RATE = 48000 // 48 kHz sample rate for high quality
const val SYMBOL_RATE = 2400 // 2400 baud
const val SAMPLES_PER_SYMBOL = SAMPLE_RATE / SYMBOL_RATE // 20 samples per symbol
// 4FSK frequencies (Hz) - evenly spaced for optimal detection
const val FREQ_00 = 1200.0 // Symbol 00
const val FREQ_01 = 1800.0 // Symbol 01
const val FREQ_10 = 2400.0 // Symbol 10
const val FREQ_11 = 3000.0 // Symbol 11
// Frame structure
const val SYNC_PATTERN = 0x7E6B2840L // 32-bit sync pattern
const val FRAME_SIZE = 256 // bytes per frame
const val PREAMBLE_LENGTH = 32 // symbols
// Error correction
const val FEC_OVERHEAD = 1.5 // Reed-Solomon overhead factor
// Audio codec settings
const val OPUS_FRAME_SIZE = 960 // 20ms at 48kHz
const val OPUS_BITRATE = 16000 // 16 kbps
const val OPUS_COMPLEXITY = 5 // Medium complexity
// Buffer sizes
const val AUDIO_BUFFER_SIZE = 4096
const val SYMBOL_BUFFER_SIZE = 1024
}
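As a quick sanity check, the throughput implied by these constants (2 bits per 4FSK symbol, overhead factor from FEC_OVERHEAD) can be derived directly; the sketch below only restates the numbers above.

```kotlin
// Back-of-envelope figures implied by FSKConstants (values copied from the object above).
fun main() {
    val sampleRate = 48_000        // FSKConstants.SAMPLE_RATE
    val symbolRate = 2_400         // FSKConstants.SYMBOL_RATE
    val bitsPerSymbol = 2          // 4FSK: four tones encode 2 bits per symbol
    val fecOverhead = 1.5          // FSKConstants.FEC_OVERHEAD

    println("samples/symbol = ${sampleRate / symbolRate}")                                 // 20
    println("raw bit rate   = ${symbolRate * bitsPerSymbol} bps")                          // 4800
    println("net bit rate   = ${(symbolRate * bitsPerSymbol / fecOverhead).toInt()} bps")  // 3200
}
```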

View File

@ -0,0 +1,214 @@
package com.icing.dialer.modem
import org.apache.commons.math3.complex.Complex
import org.apache.commons.math3.transform.DftNormalization
import org.apache.commons.math3.transform.FastFourierTransformer
import org.apache.commons.math3.transform.TransformType
import kotlin.math.*
class FSKDemodulator {
private val fft = FastFourierTransformer(DftNormalization.STANDARD)
private val symbolBuffer = mutableListOf<Int>()
private var sampleBuffer = FloatArray(0)
private var syncFound = false
private var syncPosition = 0
// Moving average filters for each frequency
private val freq00Filter = MovingAverageFilter(FSKConstants.SAMPLES_PER_SYMBOL / 2)
private val freq01Filter = MovingAverageFilter(FSKConstants.SAMPLES_PER_SYMBOL / 2)
private val freq10Filter = MovingAverageFilter(FSKConstants.SAMPLES_PER_SYMBOL / 2)
private val freq11Filter = MovingAverageFilter(FSKConstants.SAMPLES_PER_SYMBOL / 2)
// Demodulate audio samples to symbols
fun demodulateSamples(samples: FloatArray): IntArray {
val symbols = mutableListOf<Int>()
// Process samples in chunks of SAMPLES_PER_SYMBOL
for (i in 0 until samples.size - FSKConstants.SAMPLES_PER_SYMBOL step FSKConstants.SAMPLES_PER_SYMBOL) {
val symbolSamples = samples.sliceArray(i until i + FSKConstants.SAMPLES_PER_SYMBOL)
val symbol = detectSymbol(symbolSamples)
symbols.add(symbol)
}
return symbols.toIntArray()
}
// Non-coherent detection using Goertzel algorithm for efficiency
private fun detectSymbol(samples: FloatArray): Int {
val power00 = goertzelMagnitude(samples, FSKConstants.FREQ_00)
val power01 = goertzelMagnitude(samples, FSKConstants.FREQ_01)
val power10 = goertzelMagnitude(samples, FSKConstants.FREQ_10)
val power11 = goertzelMagnitude(samples, FSKConstants.FREQ_11)
// Apply moving average filter to reduce noise
val filtered00 = freq00Filter.filter(power00)
val filtered01 = freq01Filter.filter(power01)
val filtered10 = freq10Filter.filter(power10)
val filtered11 = freq11Filter.filter(power11)
// Find maximum power
val powers = floatArrayOf(filtered00, filtered01, filtered10, filtered11)
var maxIndex = 0
var maxPower = powers[0]
for (i in 1 until powers.size) {
if (powers[i] > maxPower) {
maxPower = powers[i]
maxIndex = i
}
}
return maxIndex
}
// Goertzel algorithm for single frequency detection
private fun goertzelMagnitude(samples: FloatArray, targetFreq: Double): Float {
val k = round(samples.size * targetFreq / FSKConstants.SAMPLE_RATE).toInt()
val omega = 2.0 * PI * k / samples.size
val cosine = cos(omega)
val coeff = 2.0 * cosine
var q0 = 0.0
var q1 = 0.0
var q2 = 0.0
for (sample in samples) {
q0 = coeff * q1 - q2 + sample
q2 = q1
q1 = q0
}
val real = q1 - q2 * cosine
val imag = q2 * sin(omega)
return sqrt(real * real + imag * imag).toFloat()
}
// Find preamble in audio stream
fun findPreamble(samples: FloatArray): Int {
val preamblePattern = intArrayOf(1, 2, 1, 2, 1, 2, 1, 2) // 01 10 01 10...
val correlationThreshold = 0.8f
for (i in 0 until samples.size - (preamblePattern.size * FSKConstants.SAMPLES_PER_SYMBOL)) {
var correlation = 0.0f
var patternPower = 0.0f
var signalPower = 0.0f
for (j in preamblePattern.indices) {
val startIdx = i + j * FSKConstants.SAMPLES_PER_SYMBOL
val endIdx = startIdx + FSKConstants.SAMPLES_PER_SYMBOL
if (endIdx <= samples.size) {
val symbolSamples = samples.sliceArray(startIdx until endIdx)
val detectedSymbol = detectSymbol(symbolSamples)
if (detectedSymbol == preamblePattern[j]) {
correlation += 1.0f
}
// Calculate signal power for SNR estimation
for (sample in symbolSamples) {
signalPower += sample * sample
}
}
}
val normalizedCorrelation = correlation / preamblePattern.size
if (normalizedCorrelation >= correlationThreshold) {
return i
}
}
return -1 // Preamble not found
}
// Convert symbols back to bytes
fun symbolsToBytes(symbols: IntArray): ByteArray {
val bytes = ByteArray(symbols.size / 4)
var byteIndex = 0
for (i in symbols.indices step 4) {
if (i + 3 < symbols.size) {
val byte = ((symbols[i] and 0x03) shl 6) or
((symbols[i + 1] and 0x03) shl 4) or
((symbols[i + 2] and 0x03) shl 2) or
(symbols[i + 3] and 0x03)
bytes[byteIndex++] = byte.toByte()
}
}
return bytes.sliceArray(0 until byteIndex)
}
// Carrier frequency offset estimation and correction
fun estimateFrequencyOffset(samples: FloatArray): Double {
// Use pilot tone or known preamble for frequency offset estimation
val fftSize = 1024
val paddedSamples = samples.copyOf(fftSize)
// Convert to complex array for FFT
val complexSamples = Array(fftSize) { i ->
if (i < samples.size) Complex(paddedSamples[i].toDouble()) else Complex.ZERO
}
val spectrum = fft.transform(complexSamples, TransformType.FORWARD)
// Find peak frequencies
var maxMagnitude = 0.0
var peakBin = 0
for (i in spectrum.indices) {
val magnitude = spectrum[i].abs()
if (magnitude > maxMagnitude) {
maxMagnitude = magnitude
peakBin = i
}
}
// Calculate frequency offset
val detectedFreq = peakBin * FSKConstants.SAMPLE_RATE.toDouble() / fftSize
val expectedFreq = (FSKConstants.FREQ_00 + FSKConstants.FREQ_11) / 2 // Center frequency
return detectedFreq - expectedFreq
}
// Reset demodulator state
fun reset() {
symbolBuffer.clear()
sampleBuffer = FloatArray(0)
syncFound = false
syncPosition = 0
freq00Filter.reset()
freq01Filter.reset()
freq10Filter.reset()
freq11Filter.reset()
}
// Simple moving average filter
private class MovingAverageFilter(private val windowSize: Int) {
private val buffer = FloatArray(windowSize)
private var index = 0
private var sum = 0.0f
private var count = 0
fun filter(value: Float): Float {
sum -= buffer[index]
buffer[index] = value
sum += value
index = (index + 1) % windowSize
if (count < windowSize) {
count++
}
return sum / count
}
fun reset() {
buffer.fill(0.0f)
index = 0
sum = 0.0f
count = 0
}
}
}

View File

@ -0,0 +1,246 @@
package com.icing.dialer.modem
import android.media.*
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.flow.*
import java.util.concurrent.ConcurrentLinkedQueue
class FSKModem {
private val audioCodec = AudioCodec()
private val modulator = FSKModulator()
private val demodulator = FSKDemodulator()
private val frameProcessor = FrameProcessor()
private var audioRecord: AudioRecord? = null
private var audioTrack: AudioTrack? = null
private val txQueue = ConcurrentLinkedQueue<ByteArray>()
private val rxQueue = ConcurrentLinkedQueue<ByteArray>()
private val scope = CoroutineScope(Dispatchers.IO + SupervisorJob())
private var isRunning = false
// Flow for received data
private val _receivedData = MutableSharedFlow<ByteArray>()
val receivedData: SharedFlow<ByteArray> = _receivedData.asSharedFlow()
// Modem states
enum class ModemState {
IDLE, TRANSMITTING, RECEIVING, ERROR
}
private val _state = MutableStateFlow(ModemState.IDLE)
val state: StateFlow<ModemState> = _state.asStateFlow()
fun initialize() {
setupAudioRecord()
setupAudioTrack()
}
private fun setupAudioRecord() {
val bufferSize = AudioRecord.getMinBufferSize(
FSKConstants.SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT
)
audioRecord = AudioRecord(
MediaRecorder.AudioSource.MIC,
FSKConstants.SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize * 2
)
}
private fun setupAudioTrack() {
val bufferSize = AudioTrack.getMinBufferSize(
FSKConstants.SAMPLE_RATE,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT
)
audioTrack = AudioTrack(
AudioManager.STREAM_MUSIC,
FSKConstants.SAMPLE_RATE,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize * 2,
AudioTrack.MODE_STREAM
)
}
fun start() {
if (isRunning) return
isRunning = true
audioRecord?.startRecording()
audioTrack?.play()
// Start coroutines for TX and RX
scope.launch { transmitLoop() }
scope.launch { receiveLoop() }
}
fun stop() {
isRunning = false
audioRecord?.stop()
audioTrack?.stop()
scope.cancel()
}
fun sendData(data: ByteArray) {
txQueue.offer(data)
}
private suspend fun transmitLoop() {
val audioBuffer = ShortArray(FSKConstants.OPUS_FRAME_SIZE)
while (isRunning) {
if (txQueue.isNotEmpty()) {
_state.value = ModemState.TRANSMITTING
val data = txQueue.poll() ?: continue
try {
// Encode audio data with Opus
val encodedAudio = audioCodec.encode(data.toShortArray())
// Create frame with error correction
val frame = frameProcessor.createFrame(encodedAudio)
val frameBytes = frame.toByteArray()
// Convert to symbols
val symbols = modulator.bytesToSymbols(frameBytes)
// Generate preamble
val preamble = modulator.generatePreamble()
// Modulate symbols
val modulatedData = modulator.modulateSymbols(symbols)
// Apply raised cosine filter
val filtered = modulator.applyRaisedCosineFilter(modulatedData)
// Combine preamble and data
val txSamples = FloatArray(preamble.size + filtered.size)
System.arraycopy(preamble, 0, txSamples, 0, preamble.size)
System.arraycopy(filtered, 0, txSamples, preamble.size, filtered.size)
// Convert to 16-bit PCM and transmit
val pcmData = ShortArray(txSamples.size) { i ->
(txSamples[i] * 32767).toInt().coerceIn(-32768, 32767).toShort()
}
audioTrack?.write(pcmData, 0, pcmData.size)
} catch (e: Exception) {
_state.value = ModemState.ERROR
e.printStackTrace()
}
_state.value = ModemState.IDLE
}
delay(10) // Small delay to prevent busy waiting
}
}
private suspend fun receiveLoop() {
val audioBuffer = ShortArray(FSKConstants.AUDIO_BUFFER_SIZE)
val sampleBuffer = mutableListOf<Float>()
while (isRunning) {
val bytesRead = audioRecord?.read(audioBuffer, 0, audioBuffer.size) ?: 0
if (bytesRead > 0) {
_state.value = ModemState.RECEIVING
// Convert to float samples
val samples = FloatArray(bytesRead) { i ->
audioBuffer[i] / 32768.0f
}
sampleBuffer.addAll(samples.toList())
// Look for preamble
if (sampleBuffer.size >= FSKConstants.PREAMBLE_LENGTH * FSKConstants.SAMPLES_PER_SYMBOL) {
val bufferArray = sampleBuffer.toFloatArray()
val preambleIndex = demodulator.findPreamble(bufferArray)
if (preambleIndex >= 0) {
// Preamble found, extract frame
val frameStart = preambleIndex +
(FSKConstants.PREAMBLE_LENGTH * FSKConstants.SAMPLES_PER_SYMBOL)
if (frameStart < bufferArray.size) {
// Estimate and correct frequency offset
val frameSection = bufferArray.sliceArray(
frameStart until minOf(
frameStart + FSKConstants.FRAME_SIZE * 4 * FSKConstants.SAMPLES_PER_SYMBOL,
bufferArray.size
)
)
// Demodulate symbols
val symbols = demodulator.demodulateSamples(frameSection)
// Convert symbols to bytes
val frameBytes = demodulator.symbolsToBytes(symbols)
// Process frame (error correction and CRC check)
val decodedData = frameProcessor.processFrame(frameBytes)
if (decodedData != null) {
// Decode audio with Opus
val audioData = audioCodec.decode(decodedData)
// Emit received data
_receivedData.emit(audioData.toByteArray())
}
// Remove processed samples
sampleBuffer.subList(0, frameStart + frameSection.size).clear()
}
}
// Limit buffer size to prevent memory issues
if (sampleBuffer.size > FSKConstants.SAMPLE_RATE * 2) {
sampleBuffer.subList(0, FSKConstants.SAMPLE_RATE).clear()
}
}
_state.value = ModemState.IDLE
}
delay(10)
}
}
fun release() {
stop()
audioRecord?.release()
audioTrack?.release()
audioCodec.release()
audioRecord = null
audioTrack = null
}
// Utility extension functions
private fun ByteArray.toShortArray(): ShortArray {
return ShortArray(size / 2) { i ->
((this[i * 2].toInt() and 0xFF) or
((this[i * 2 + 1].toInt() and 0xFF) shl 8)).toShort()
}
}
private fun ShortArray.toByteArray(): ByteArray {
val bytes = ByteArray(size * 2)
for (i in indices) {
bytes[i * 2] = (this[i].toInt() and 0xFF).toByte()
bytes[i * 2 + 1] = ((this[i].toInt() shr 8) and 0xFF).toByte()
}
return bytes
}
}

View File

@ -0,0 +1,216 @@
package com.icing.dialer.modem
import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import androidx.core.app.ActivityCompat
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.collect
class FSKModemExample(private val context: Context) {
private val modem = FSKModem()
private val scope = CoroutineScope(Dispatchers.Main + SupervisorJob())
fun checkPermissions(): Boolean {
return ActivityCompat.checkSelfPermission(
context,
Manifest.permission.RECORD_AUDIO
) == PackageManager.PERMISSION_GRANTED
}
fun startModem() {
if (!checkPermissions()) {
println("Audio recording permission not granted")
return
}
// Initialize modem
modem.initialize()
// Set up data reception handler
scope.launch {
modem.receivedData.collect { data ->
handleReceivedData(data)
}
}
// Monitor modem state
scope.launch {
modem.state.collect { state ->
println("Modem state: $state")
}
}
// Start modem
modem.start()
}
fun sendTextMessage(message: String) {
val data = message.toByteArray(Charsets.UTF_8)
modem.sendData(data)
}
fun sendBinaryData(data: ByteArray) {
// Split large data into frames if necessary
val maxPayloadSize = 200 // Based on Reed-Solomon configuration
for (i in data.indices step maxPayloadSize) {
val chunk = data.sliceArray(
i until minOf(i + maxPayloadSize, data.size)
)
modem.sendData(chunk)
}
}
private fun handleReceivedData(data: ByteArray) {
// Handle received data
try {
val message = String(data, Charsets.UTF_8)
println("Received message: $message")
} catch (e: Exception) {
println("Received binary data: ${data.size} bytes")
}
}
fun stopModem() {
modem.stop()
modem.release()
scope.cancel()
}
// Example: Voice communication
fun startVoiceTransmission() {
scope.launch(Dispatchers.IO) {
// In a real implementation, you would capture audio from microphone
// and send it through the modem
while (isActive) {
// Simulated audio data (replace with actual audio capture)
val audioFrame = ShortArray(FSKConstants.OPUS_FRAME_SIZE)
// Fill audioFrame with audio samples...
// Convert to byte array and send
val audioBytes = audioFrame.toByteArray()
modem.sendData(audioBytes)
delay(20) // 20ms frames
}
}
}
private fun ShortArray.toByteArray(): ByteArray {
val bytes = ByteArray(size * 2)
for (i in indices) {
bytes[i * 2] = (this[i].toInt() and 0xFF).toByte()
bytes[i * 2 + 1] = ((this[i].toInt() shr 8) and 0xFF).toByte()
}
return bytes
}
}
// Unit tests
class FSKModemTest {
fun testModulation() {
val modulator = FSKModulator()
// Test data
val testData = byteArrayOf(0x55, 0xAA.toByte(), 0x0F, 0xF0.toByte())
// Convert to symbols
val symbols = modulator.bytesToSymbols(testData)
println("Symbols: ${symbols.joinToString()}")
// Generate preamble
val preamble = modulator.generatePreamble()
println("Preamble length: ${preamble.size} samples")
// Modulate symbols
val modulated = modulator.modulateSymbols(symbols)
println("Modulated signal length: ${modulated.size} samples")
// Apply filter
val filtered = modulator.applyRaisedCosineFilter(modulated)
println("Filtered signal length: ${filtered.size} samples")
}
fun testDemodulation() {
val modulator = FSKModulator()
val demodulator = FSKDemodulator()
// Test data
val testData = "Hello FSK Modem!".toByteArray()
// Modulate
val symbols = modulator.bytesToSymbols(testData)
val preamble = modulator.generatePreamble()
val modulated = modulator.modulateSymbols(symbols)
// Combine preamble and data
val signal = FloatArray(preamble.size + modulated.size)
System.arraycopy(preamble, 0, signal, 0, preamble.size)
System.arraycopy(modulated, 0, signal, preamble.size, modulated.size)
// Find preamble
val preambleIndex = demodulator.findPreamble(signal)
println("Preamble found at index: $preambleIndex")
// Demodulate
val dataStart = preambleIndex + preamble.size
val dataSignal = signal.sliceArray(dataStart until signal.size)
val demodSymbols = demodulator.demodulateSamples(dataSignal)
// Convert back to bytes
val demodData = demodulator.symbolsToBytes(demodSymbols)
val demodMessage = String(demodData, 0, testData.size)
println("Demodulated message: $demodMessage")
}
fun testFrameProcessing() {
val processor = FrameProcessor()
// Test data
val testData = ByteArray(200) { it.toByte() }
// Create frame
val frame = processor.createFrame(testData)
println("Frame size: ${frame.toByteArray().size} bytes")
// Simulate transmission (no errors)
val frameBytes = frame.toByteArray()
// Process received frame
val decoded = processor.processFrame(frameBytes)
println("Decoded data: ${decoded?.size} bytes")
// Verify data integrity
if (decoded != null && decoded.contentEquals(testData)) {
println("Frame processing successful!")
}
}
fun testAudioCodec() {
try {
val codec = AudioCodec()
// Test audio data (sine wave)
val testAudio = ShortArray(FSKConstants.OPUS_FRAME_SIZE) { i ->
(32767 * kotlin.math.sin(2 * kotlin.math.PI * 440 * i / FSKConstants.SAMPLE_RATE)).toInt().toShort()
}
// Encode
val encoded = codec.encode(testAudio)
println("Encoded size: ${encoded.size} bytes (compression ratio: ${testAudio.size * 2.0 / encoded.size})")
// Decode
val decoded = codec.decode(encoded)
println("Decoded samples: ${decoded.size}")
// Test packet loss handling
val lostPacket = codec.decodeLost()
println("Lost packet recovery: ${lostPacket.size} samples")
codec.release()
} catch (e: Exception) {
println("Audio codec test failed: ${e.message}")
}
}
}

View File

@ -0,0 +1,138 @@
package com.icing.dialer.modem
import kotlin.math.*
class FSKModulator {
private var phase = 0.0
private val symbolBuffer = mutableListOf<Int>()
// Generate preamble for synchronization
fun generatePreamble(): FloatArray {
val samples = FloatArray(FSKConstants.PREAMBLE_LENGTH * FSKConstants.SAMPLES_PER_SYMBOL)
var sampleIndex = 0
// Alternating 01 10 pattern for easy detection
for (i in 0 until FSKConstants.PREAMBLE_LENGTH) {
val symbol = if (i % 2 == 0) 1 else 2 // Alternating 01 and 10
val freq = getFrequencyForSymbol(symbol)
for (j in 0 until FSKConstants.SAMPLES_PER_SYMBOL) {
samples[sampleIndex++] = generateSample(freq)
}
}
return samples
}
// Convert bytes to 4FSK symbols (2 bits per symbol)
fun bytesToSymbols(data: ByteArray): IntArray {
val symbols = IntArray(data.size * 4) // 4 symbols per byte
var symbolIndex = 0
for (byte in data) {
val value = byte.toInt() and 0xFF
// Extract 2-bit symbols from MSB to LSB
symbols[symbolIndex++] = (value shr 6) and 0x03
symbols[symbolIndex++] = (value shr 4) and 0x03
symbols[symbolIndex++] = (value shr 2) and 0x03
symbols[symbolIndex++] = value and 0x03
}
return symbols
}
// Modulate symbols to audio samples with smooth transitions
fun modulateSymbols(symbols: IntArray): FloatArray {
val samples = FloatArray(symbols.size * FSKConstants.SAMPLES_PER_SYMBOL)
var sampleIndex = 0
for (i in symbols.indices) {
val currentFreq = getFrequencyForSymbol(symbols[i])
val nextFreq = if (i < symbols.size - 1) {
getFrequencyForSymbol(symbols[i + 1])
} else {
currentFreq
}
// Generate samples with smooth frequency transition
for (j in 0 until FSKConstants.SAMPLES_PER_SYMBOL) {
val progress = j.toFloat() / FSKConstants.SAMPLES_PER_SYMBOL
val freq = if (j >= FSKConstants.SAMPLES_PER_SYMBOL - 2) {
// Smooth transition in last 2 samples
currentFreq * (1 - progress) + nextFreq * progress
} else {
currentFreq
}
samples[sampleIndex++] = generateSample(freq)
}
}
return samples
}
// Generate single sample with continuous phase
private fun generateSample(frequency: Double): Float {
val sample = sin(2.0 * PI * phase).toFloat()
phase += frequency / FSKConstants.SAMPLE_RATE
// Keep phase in [0, 1] range to prevent precision loss
if (phase >= 1.0) {
phase -= 1.0
}
return sample
}
// Map symbol to frequency
private fun getFrequencyForSymbol(symbol: Int): Double {
return when (symbol) {
0 -> FSKConstants.FREQ_00
1 -> FSKConstants.FREQ_01
2 -> FSKConstants.FREQ_10
3 -> FSKConstants.FREQ_11
else -> throw IllegalArgumentException("Invalid symbol: $symbol")
}
}
// Apply raised cosine filter for spectral shaping
fun applyRaisedCosineFilter(samples: FloatArray): FloatArray {
val alpha = 0.35 // Roll-off factor
val filteredSamples = FloatArray(samples.size)
val filterLength = 65 // Filter taps
val halfLength = filterLength / 2
for (i in samples.indices) {
var sum = 0.0f
for (j in -halfLength..halfLength) {
val sampleIndex = i + j
if (sampleIndex in samples.indices) {
val t = j.toFloat() / FSKConstants.SAMPLES_PER_SYMBOL
val h = if (abs(t) < 1e-6f) {
1.0f
} else if (abs(abs(t) - 0.5f / alpha.toFloat()) < 1e-6f) {
((PI / 4) * sinc(0.5f / alpha.toFloat())).toFloat()
} else {
(sinc(t) * cos(PI * alpha * t) / (1 - 4 * alpha * alpha * t * t)).toFloat()
}
sum += samples[sampleIndex] * h
}
}
filteredSamples[i] = sum * 0.8f // Scale to prevent clipping
}
return filteredSamples
}
private fun sinc(x: Float): Float {
return if (abs(x) < 1e-6) 1.0f else (sin(PI * x) / (PI * x)).toFloat()
}
// Reset modulator state
fun reset() {
phase = 0.0
symbolBuffer.clear()
}
}
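To make the 2-bits-per-symbol mapping concrete: `bytesToSymbols` splits each byte MSB-first into four 2-bit symbols, and `FSKDemodulator.symbolsToBytes` reassembles them in the same order. A small sketch, assuming both classes from this changeset are on the classpath (the byte value is arbitrary):

```kotlin
// 0xB4 = 0b10_11_01_00 -> symbols [2, 3, 1, 0] -> tones 2400, 3000, 1800, 1200 Hz.
fun main() {
    val symbols = FSKModulator().bytesToSymbols(byteArrayOf(0xB4.toByte()))
    println(symbols.joinToString())                  // 2, 3, 1, 0
    println(FSKDemodulator().symbolsToBytes(symbols)
        .joinToString { "0x%02X".format(it) })       // 0xB4
}
```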

View File

@ -0,0 +1,245 @@
package com.icing.dialer.modem
import java.nio.ByteBuffer
import java.util.zip.CRC32
class FrameProcessor {
private val crc32 = CRC32()
data class Frame(
val syncWord: Int = 0x7E6B2840.toInt(),
val sequenceNumber: Int,
val payloadLength: Int,
val payload: ByteArray,
val crc: Long
) {
fun toByteArray(): ByteArray {
val buffer = ByteBuffer.allocate(12 + payload.size + 4)
buffer.putInt(syncWord)
buffer.putInt(sequenceNumber)
buffer.putInt(payloadLength)
buffer.put(payload)
buffer.putInt(crc.toInt())
return buffer.array()
}
companion object {
fun fromByteArray(data: ByteArray): Frame? {
if (data.size < 16) return null
val buffer = ByteBuffer.wrap(data)
val syncWord = buffer.getInt()
if (syncWord != 0x7E6B2840.toInt()) return null
val sequenceNumber = buffer.getInt()
val payloadLength = buffer.getInt()
if (data.size < 16 + payloadLength) return null
val payload = ByteArray(payloadLength)
buffer.get(payload)
val crc = buffer.getInt().toLong() and 0xFFFFFFFFL
return Frame(syncWord, sequenceNumber, payloadLength, payload, crc)
}
}
}
// Reed-Solomon error correction
class ReedSolomon(private val dataBytes: Int, private val parityBytes: Int) {
private val totalBytes = dataBytes + parityBytes
private val gfPoly = 0x11D // Primitive polynomial for GF(256)
private val gfSize = 256
private val logTable = IntArray(gfSize)
private val expTable = IntArray(gfSize * 2)
init {
// Initialize Galois Field tables
var x = 1
for (i in 0 until gfSize - 1) {
expTable[i] = x
logTable[x] = i
x = x shl 1
if (x >= gfSize) {
x = x xor gfPoly
}
}
expTable[gfSize - 1] = expTable[0]
// Double the exp table for convenience
for (i in gfSize until gfSize * 2) {
expTable[i] = expTable[i - gfSize]
}
}
fun encode(data: ByteArray): ByteArray {
if (data.size != dataBytes) {
throw IllegalArgumentException("Data size must be $dataBytes bytes")
}
val encoded = ByteArray(totalBytes)
System.arraycopy(data, 0, encoded, 0, dataBytes)
// Generate parity bytes
val generator = generateGeneratorPolynomial()
for (i in 0 until dataBytes) {
val coef = encoded[i].toInt() and 0xFF
if (coef != 0) {
for (j in 1..parityBytes) {
encoded[i + j] = (encoded[i + j].toInt() xor
gfMultiply(generator[j], coef)).toByte()
}
}
}
// Move parity bytes to the end
System.arraycopy(encoded, dataBytes, encoded, dataBytes, parityBytes)
return encoded
}
fun decode(received: ByteArray): ByteArray? {
if (received.size != totalBytes) return null
val syndromes = calculateSyndromes(received)
if (syndromes.all { it == 0 }) {
// No errors
return received.copyOf(dataBytes)
}
// Berlekamp-Massey algorithm to find error locator polynomial
val errorLocator = findErrorLocator(syndromes)
val errorPositions = findErrorPositions(errorLocator)
if (errorPositions.size > parityBytes / 2) {
// Too many errors to correct
return null
}
// Forney algorithm to find error values
val errorValues = findErrorValues(syndromes, errorLocator, errorPositions)
// Correct errors
val corrected = received.copyOf()
for (i in errorPositions.indices) {
corrected[errorPositions[i]] =
(corrected[errorPositions[i]].toInt() xor errorValues[i]).toByte()
}
return corrected.copyOf(dataBytes)
}
private fun gfMultiply(a: Int, b: Int): Int {
if (a == 0 || b == 0) return 0
return expTable[logTable[a] + logTable[b]]
}
private fun generateGeneratorPolynomial(): IntArray {
val generator = IntArray(parityBytes + 1)
generator[0] = 1
for (i in 0 until parityBytes) {
generator[i + 1] = 1
for (j in i downTo 1) {
generator[j] = generator[j - 1] xor gfMultiply(generator[j], expTable[i])
}
generator[0] = gfMultiply(generator[0], expTable[i])
}
return generator
}
private fun calculateSyndromes(received: ByteArray): IntArray {
val syndromes = IntArray(parityBytes)
for (i in 0 until parityBytes) {
var syndrome = 0
for (j in 0 until totalBytes) {
syndrome = syndrome xor gfMultiply(received[j].toInt() and 0xFF,
expTable[(j * (i + 1)) % (gfSize - 1)])
}
syndromes[i] = syndrome
}
return syndromes
}
private fun findErrorLocator(syndromes: IntArray): IntArray {
// Simplified Berlekamp-Massey for demonstration
// In production, use a full implementation
val errorLocator = IntArray(parityBytes / 2 + 1)
errorLocator[0] = 1
return errorLocator
}
private fun findErrorPositions(errorLocator: IntArray): IntArray {
// Chien search
val positions = mutableListOf<Int>()
for (i in 0 until totalBytes) {
var sum = 0
for (j in errorLocator.indices) {
sum = sum xor gfMultiply(errorLocator[j],
expTable[(j * i) % (gfSize - 1)])
}
if (sum == 0) {
positions.add(totalBytes - 1 - i)
}
}
return positions.toIntArray()
}
private fun findErrorValues(syndromes: IntArray, errorLocator: IntArray,
errorPositions: IntArray): IntArray {
// Simplified Forney algorithm
val errorValues = IntArray(errorPositions.size)
// Implementation would go here
return errorValues
}
}
private var sequenceNumber = 0
private val rs = ReedSolomon(200, 56) // (256, 200) Reed-Solomon code
fun createFrame(data: ByteArray): Frame {
// Apply Reed-Solomon encoding
val encoded = rs.encode(data)
// Calculate CRC32
crc32.reset()
crc32.update(encoded)
val crc = crc32.value
val frame = Frame(
sequenceNumber = sequenceNumber++,
payloadLength = encoded.size,
payload = encoded,
crc = crc
)
return frame
}
fun processFrame(frameData: ByteArray): ByteArray? {
val frame = Frame.fromByteArray(frameData) ?: return null
// Verify CRC
crc32.reset()
crc32.update(frame.payload)
if (crc32.value != frame.crc) {
// CRC mismatch, try error correction
return null
}
// Decode Reed-Solomon
return rs.decode(frame.payload)
}
fun reset() {
sequenceNumber = 0
}
}
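The on-the-wire frame size follows from `Frame.toByteArray()` and the `ReedSolomon(200, 56)` configuration above; a short sketch of the arithmetic:

```kotlin
// Frame layout implied by Frame.toByteArray(): 4B sync + 4B sequence + 4B length
// + Reed-Solomon coded payload + 4B CRC32. With ReedSolomon(200, 56) the coded
// payload is 256 bytes, so each frame occupies 272 bytes before modulation.
fun main() {
    val header = 4 + 4 + 4        // sync word, sequence number, payload length
    val payload = 200 + 56        // RS data bytes + parity bytes
    val crc = 4
    println("frame size = ${header + payload + crc} bytes")   // 272
}
```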

View File

@ -127,7 +127,7 @@ class MyInCallService : InCallService() {
}
call.registerCallback(callCallback)
if (callAudioState != null) {
val audioState = callAudioState
val audioState = callAudioState
channel?.invokeMethod("audioStateChanged", mapOf(
"route" to audioState.route,
"muted" to audioState.isMuted,

View File

@ -3,10 +3,15 @@
<uses-permission android:name="android.permission.WRITE_CONTACTS"/>
<uses-permission android:name="android.permission.CALL_PHONE" />
<uses-permission android:name="android.permission.SEND_SMS" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.READ_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.WRITE_BLOCKED_NUMBERS" />
<uses-permission android:name="android.permission.READ_CALL_LOG"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
<uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
<uses-permission android:name="android.permission.ANSWER_PHONE_CALLS" />
<uses-permission android:name="android.permission.PROCESS_OUTGOING_CALLS" />
<uses-permission android:name="android.permission.POST_NOTIFICATIONS" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.telephony" android:required="true" />
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.

View File

@ -10,7 +10,7 @@ class AsymmetricCryptoService {
final String _aliasPrefix = 'icing_';
final Uuid _uuid = Uuid();
/// Generates an ECDSA P-256 key pair with a unique alias and stores its metadata.
/// Generates an ED25519 key pair with a unique alias and stores its metadata.
Future<String> generateKeyPair({String? label}) async {
try {
// Generate a unique identifier for the key

View File

@ -0,0 +1,81 @@
# DryBox - Secure Voice Communication System
A PyQt5-based application demonstrating secure voice communication using the Noise XK protocol, Codec2 audio compression, and 4FSK modulation.
## Features
- **Secure Communication**: End-to-end encryption using Noise XK protocol
- **Audio Compression**: Codec2 (3200bps) for efficient voice transmission
- **Modulation**: 4FSK (4-level Frequency Shift Keying) for robust transmission
- **GSM Network Simulation**: Simulates realistic GSM network conditions
- **Real-time Audio**: Playback and recording capabilities
- **Visual Feedback**: Waveform displays and signal strength indicators
## Requirements
- Python 3.7+
- PyQt5
- NumPy
- pycodec2
- Additional dependencies in `requirements.txt`
## Installation
1. Install system dependencies:
```bash
./install_audio_deps.sh
```
2. Install Python dependencies:
```bash
pip install -r requirements.txt
```
## Running the Application
Simply run:
```bash
python3 UI/main.py
```
The application will automatically:
- Start the GSM network simulator
- Initialize two phone clients
- Display the main UI with GSM status panel
## Usage
### Phone Controls
- **Click "Call" button** or press `1`/`2` to initiate/answer calls
- **Ctrl+1/2**: Toggle audio playback for each phone
- **Alt+1/2**: Toggle audio recording for each phone
### GSM Settings
- **Click "Settings" button** or press `Ctrl+G` to open GSM settings dialog
- Adjust signal strength, quality, noise, and network parameters
- Use presets for quick configuration (Excellent/Good/Fair/Poor)
### Other Controls
- **Space**: Run automatic test sequence
- **Ctrl+L**: Clear debug console
- **Ctrl+A**: Audio processing options menu
## Architecture
- **main.py**: Main UI application
- **phone_manager.py**: Manages phone instances and audio
- **protocol_phone_client.py**: Implements the secure protocol stack
- **noise_wrapper.py**: Noise XK protocol implementation
- **gsm_simulator.py**: Network simulation relay
- **gsm_status_widget.py**: Real-time GSM status display
## Testing
The automatic test feature (`Space` key) runs through a complete call sequence:
1. Initial state verification
2. Call initiation
3. Call answering
4. Noise XK handshake
5. Voice session establishment
6. Audio transmission
7. Call termination

View File

@ -0,0 +1,325 @@
import wave
import threading
import queue
import time
import os
from datetime import datetime
from PyQt5.QtCore import QObject, pyqtSignal
# Try to import PyAudio, but handle if it's not available
try:
import pyaudio
PYAUDIO_AVAILABLE = True
except ImportError:
PYAUDIO_AVAILABLE = False
print("Warning: PyAudio not installed. Audio playback will be disabled.")
print("To enable playback, install with: sudo dnf install python3-devel portaudio-devel && pip install pyaudio")
class AudioPlayer(QObject):
playback_started = pyqtSignal(int) # client_id
playback_stopped = pyqtSignal(int) # client_id
recording_saved = pyqtSignal(int, str) # client_id, filepath
def __init__(self):
super().__init__()
self.audio = None
self.streams = {} # client_id -> stream
self.buffers = {} # client_id -> queue
self.recording_buffers = {} # client_id -> list of audio data
self.recording_enabled = {} # client_id -> bool
self.playback_enabled = {} # client_id -> bool
self.sample_rate = 8000
self.channels = 1
self.chunk_size = 320 # 40ms at 8kHz
self.debug_callback = None
self.actual_sample_rate = 8000 # Will be updated if needed
if PYAUDIO_AVAILABLE:
try:
self.audio = pyaudio.PyAudio()
except Exception as e:
self.debug(f"Failed to initialize PyAudio: {e}")
self.audio = None
else:
self.audio = None
self.debug("PyAudio not available - playback disabled, recording still works")
def debug(self, message):
if self.debug_callback:
self.debug_callback(f"[AudioPlayer] {message}")
else:
print(f"[AudioPlayer] {message}")
def set_debug_callback(self, callback):
self.debug_callback = callback
def start_playback(self, client_id):
"""Start audio playback for a client"""
if not self.audio:
self.debug("Audio playback not available - PyAudio not installed")
self.debug("To enable: sudo dnf install python3-devel portaudio-devel && pip install pyaudio")
return False
if client_id in self.streams:
self.debug(f"Playback already active for client {client_id}")
return False
try:
# Create buffer for this client
self.buffers[client_id] = queue.Queue(maxsize=100) # Limit queue size
self.playback_enabled[client_id] = True
# Try different sample rates if 8000 Hz fails
sample_rates = [8000, 16000, 44100, 48000]
stream = None
for rate in sample_rates:
try:
# Adjust buffer size based on sample rate
buffer_frames = int(640 * rate / 8000) # Scale buffer size
# Create audio stream with callback for continuous playback
def audio_callback(in_data, frame_count, time_info, status):
if status:
self.debug(f"Playback status for client {client_id}: {status}")
# Get audio data from buffer
audio_data = b''
bytes_needed = frame_count * 2 # 16-bit samples
# Try to get enough data for the requested frame count
while len(audio_data) < bytes_needed:
try:
chunk = self.buffers[client_id].get_nowait()
# Resample if needed
if self.actual_sample_rate != self.sample_rate:
chunk = self._resample_audio(chunk, self.sample_rate, self.actual_sample_rate)
audio_data += chunk
except queue.Empty:
# No more data available, pad with silence
if len(audio_data) < bytes_needed:
silence = b'\x00' * (bytes_needed - len(audio_data))
audio_data += silence
break
# Trim to exact size if we got too much
if len(audio_data) > bytes_needed:
# Put extra back in queue
extra = audio_data[bytes_needed:]
try:
self.buffers[client_id].put_nowait(extra)
except queue.Full:
pass
audio_data = audio_data[:bytes_needed]
return (audio_data, pyaudio.paContinue)
# Try to create stream with current sample rate
stream = self.audio.open(
format=pyaudio.paInt16,
channels=self.channels,
rate=rate,
output=True,
frames_per_buffer=buffer_frames,
stream_callback=audio_callback
)
self.actual_sample_rate = rate
if rate != self.sample_rate:
self.debug(f"Using sample rate {rate} Hz (resampling from {self.sample_rate} Hz)")
break # Success!
except Exception as e:
if rate == sample_rates[-1]: # Last attempt
raise e
else:
self.debug(f"Sample rate {rate} Hz failed, trying next...")
continue
if not stream:
raise Exception("Could not create audio stream with any sample rate")
self.streams[client_id] = stream
stream.start_stream()
self.debug(f"Started callback-based playback for client {client_id} at {self.actual_sample_rate} Hz")
self.playback_started.emit(client_id)
return True
except Exception as e:
self.debug(f"Failed to start playback for client {client_id}: {e}")
self.playback_enabled[client_id] = False
if client_id in self.buffers:
del self.buffers[client_id]
return False
def stop_playback(self, client_id):
"""Stop audio playback for a client"""
if client_id not in self.streams:
return
self.playback_enabled[client_id] = False
# Stop and close stream
if client_id in self.streams:
try:
self.streams[client_id].stop_stream()
self.streams[client_id].close()
except:
pass
del self.streams[client_id]
# Clear buffer
if client_id in self.buffers:
# Clear any remaining data
while not self.buffers[client_id].empty():
try:
self.buffers[client_id].get_nowait()
except:
break
del self.buffers[client_id]
self.debug(f"Stopped playback for client {client_id}")
self.playback_stopped.emit(client_id)
def add_audio_data(self, client_id, pcm_data):
"""Add audio data to playback buffer"""
# Initialize frame counter for debug logging
if not hasattr(self, '_frame_count'):
self._frame_count = {}
if client_id not in self._frame_count:
self._frame_count[client_id] = 0
self._frame_count[client_id] += 1
# Only log occasionally to avoid spam
if self._frame_count[client_id] == 1 or self._frame_count[client_id] % 25 == 0:
self.debug(f"Client {client_id} audio frame #{self._frame_count[client_id]}: {len(pcm_data)} bytes")
if client_id in self.buffers:
try:
# Use put_nowait to avoid blocking
self.buffers[client_id].put_nowait(pcm_data)
if self._frame_count[client_id] == 1:
self.debug(f"Client {client_id} buffer started, queue size: {self.buffers[client_id].qsize()}")
except queue.Full:
# Buffer is full, drop oldest data to make room
try:
self.buffers[client_id].get_nowait() # Remove oldest
self.buffers[client_id].put_nowait(pcm_data) # Add newest
if self._frame_count[client_id] % 50 == 0: # Log occasionally
self.debug(f"Client {client_id} buffer overflow, dropping old data")
except Exception:
pass
else:
if self._frame_count[client_id] == 1:
self.debug(f"Client {client_id} has no buffer (playback not started?)")
# Add to recording buffer if recording
if self.recording_enabled.get(client_id, False):
if client_id not in self.recording_buffers:
self.recording_buffers[client_id] = []
self.recording_buffers[client_id].append(pcm_data)
def start_recording(self, client_id):
"""Start recording received audio"""
self.recording_enabled[client_id] = True
self.recording_buffers[client_id] = []
self.debug(f"Started recording for client {client_id}")
def stop_recording(self, client_id, save_path=None):
"""Stop recording and optionally save to file"""
if not self.recording_enabled.get(client_id, False):
return None
self.recording_enabled[client_id] = False
if client_id not in self.recording_buffers:
return None
audio_data = self.recording_buffers[client_id]
if not audio_data:
self.debug(f"No audio data recorded for client {client_id}")
return None
# Generate filename if not provided
if not save_path:
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
save_path = f"wav/received_client{client_id}_{timestamp}.wav"
# Ensure directory exists
save_dir = os.path.dirname(save_path)
if save_dir:
os.makedirs(save_dir, exist_ok=True)
try:
# Combine all audio chunks
combined_audio = b''.join(audio_data)
# Save as WAV file
with wave.open(save_path, 'wb') as wav_file:
wav_file.setnchannels(self.channels)
wav_file.setsampwidth(2) # 16-bit
wav_file.setframerate(self.sample_rate) # Always save at original 8kHz
wav_file.writeframes(combined_audio)
self.debug(f"Saved recording for client {client_id} to {save_path}")
self.recording_saved.emit(client_id, save_path)
# Clear recording buffer
del self.recording_buffers[client_id]
return save_path
except Exception as e:
self.debug(f"Failed to save recording for client {client_id}: {e}")
return None
def _resample_audio(self, audio_data, from_rate, to_rate):
"""Simple linear resampling of audio data"""
if from_rate == to_rate:
return audio_data
import struct
# Convert bytes to samples
samples = struct.unpack(f'{len(audio_data)//2}h', audio_data)
# Calculate resampling ratio
ratio = to_rate / from_rate
new_length = int(len(samples) * ratio)
# Simple linear interpolation
resampled = []
for i in range(new_length):
# Find position in original samples
pos = i / ratio
idx = int(pos)
frac = pos - idx
if idx < len(samples) - 1:
# Linear interpolation between samples
sample = int(samples[idx] * (1 - frac) + samples[idx + 1] * frac)
else:
# Use last sample
sample = samples[-1] if samples else 0
resampled.append(sample)
# Convert back to bytes
return struct.pack(f'{len(resampled)}h', *resampled)
def cleanup(self):
"""Clean up audio resources"""
# Stop all playback
for client_id in list(self.streams.keys()):
self.stop_playback(client_id)
# Terminate PyAudio
if self.audio:
self.audio.terminate()
self.audio = None
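A minimal sketch (not part of this change) of the same linear resampling done with numpy.interp, useful for sanity-checking _resample_audio above; it assumes the 16-bit mono PCM byte strings used throughout this diff:

import numpy as np

def resample_linear(pcm: bytes, from_rate: int, to_rate: int) -> bytes:
    # Equivalent of AudioPlayer._resample_audio, vectorised with numpy
    if from_rate == to_rate or not pcm:
        return pcm
    samples = np.frombuffer(pcm, dtype=np.int16).astype(np.float32)
    new_len = int(len(samples) * to_rate / from_rate)
    old_idx = np.arange(len(samples))                    # original sample positions
    new_idx = np.linspace(0, len(samples) - 1, new_len)  # positions of resampled points
    return np.interp(new_idx, old_idx, samples).astype(np.int16).tobytes()

# Example: one 40 ms frame at 8 kHz (320 samples) becomes 1920 samples at 48 kHz.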

View File

@ -0,0 +1,220 @@
import numpy as np
import wave
import os
from datetime import datetime
from PyQt5.QtCore import QObject, pyqtSignal
import struct
class AudioProcessor(QObject):
processing_complete = pyqtSignal(str) # filepath
def __init__(self):
super().__init__()
self.debug_callback = None
def debug(self, message):
if self.debug_callback:
self.debug_callback(f"[AudioProcessor] {message}")
else:
print(f"[AudioProcessor] {message}")
def set_debug_callback(self, callback):
self.debug_callback = callback
def apply_gain(self, audio_data, gain_db):
"""Apply gain to audio data"""
# Convert bytes to numpy array
samples = np.frombuffer(audio_data, dtype=np.int16)
# Apply gain
gain_linear = 10 ** (gain_db / 20.0)
samples_float = samples.astype(np.float32) * gain_linear
# Clip to prevent overflow
samples_float = np.clip(samples_float, -32768, 32767)
# Convert back to int16
return samples_float.astype(np.int16).tobytes()
def apply_noise_gate(self, audio_data, threshold_db=-40):
"""Apply noise gate to remove low-level noise"""
samples = np.frombuffer(audio_data, dtype=np.int16)
# Calculate RMS in dB
rms = np.sqrt(np.mean(samples.astype(np.float32) ** 2))
rms_db = 20 * np.log10(max(rms, 1e-10))
# Gate the audio if below threshold
if rms_db < threshold_db:
return np.zeros_like(samples, dtype=np.int16).tobytes()
return audio_data
def apply_low_pass_filter(self, audio_data, cutoff_hz=3400, sample_rate=8000):
"""Apply simple low-pass filter"""
samples = np.frombuffer(audio_data, dtype=np.int16).astype(np.float32)
# Simple moving average filter
# Calculate filter length based on cutoff frequency
filter_length = int(sample_rate / cutoff_hz)
if filter_length < 3:
filter_length = 3
# Apply moving average
filtered = np.convolve(samples, np.ones(filter_length) / filter_length, mode='same')
return filtered.astype(np.int16).tobytes()
def apply_high_pass_filter(self, audio_data, cutoff_hz=300, sample_rate=8000):
"""Apply simple high-pass filter"""
samples = np.frombuffer(audio_data, dtype=np.int16).astype(np.float32)
# Simple differentiator as high-pass
filtered = np.diff(samples, prepend=samples[0])
# Scale to maintain amplitude
scale = cutoff_hz / (sample_rate / 2)
filtered *= scale
return filtered.astype(np.int16).tobytes()
def normalize_audio(self, audio_data, target_db=-3):
"""Normalize audio to target dB level"""
samples = np.frombuffer(audio_data, dtype=np.int16).astype(np.float32)
# Find peak
peak = np.max(np.abs(samples))
if peak == 0:
return audio_data
# Calculate current peak in dB
current_db = 20 * np.log10(peak / 32768.0)
# Calculate gain needed
gain_db = target_db - current_db
# Apply gain
return self.apply_gain(audio_data, gain_db)
def remove_silence(self, audio_data, threshold_db=-40, min_silence_ms=100, sample_rate=8000):
"""Remove silence from audio"""
samples = np.frombuffer(audio_data, dtype=np.int16)
# Calculate frame size for silence detection
frame_size = int(sample_rate * min_silence_ms / 1000)
# Detect non-silent regions
non_silent_regions = []
i = 0
while i < len(samples):
frame = samples[i:i+frame_size]
if len(frame) == 0:
break
# Calculate RMS of frame
rms = np.sqrt(np.mean(frame.astype(np.float32) ** 2))
rms_db = 20 * np.log10(max(rms, 1e-10))
if rms_db > threshold_db:
# Found non-silent region, find its extent
start = i
while i < len(samples):
frame = samples[i:i+frame_size]
if len(frame) == 0:
break
rms = np.sqrt(np.mean(frame.astype(np.float32) ** 2))
rms_db = 20 * np.log10(max(rms, 1e-10))
if rms_db <= threshold_db:
break
i += frame_size
non_silent_regions.append((start, i))
else:
i += frame_size
# Combine non-silent regions
if not non_silent_regions:
return audio_data # Return original if all silent
combined = []
for start, end in non_silent_regions:
combined.extend(samples[start:end])
return np.array(combined, dtype=np.int16).tobytes()
def save_processed_audio(self, audio_data, original_path, processing_type):
"""Save processed audio with descriptive filename"""
# Generate new filename
base_name = os.path.splitext(os.path.basename(original_path))[0]
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
new_filename = f"{base_name}_{processing_type}_{timestamp}.wav"
# Ensure directory exists
save_dir = os.path.dirname(original_path)
if not save_dir:
save_dir = "wav"
os.makedirs(save_dir, exist_ok=True)
save_path = os.path.join(save_dir, new_filename)
try:
with wave.open(save_path, 'wb') as wav_file:
wav_file.setnchannels(1)
wav_file.setsampwidth(2)
wav_file.setframerate(8000)
wav_file.writeframes(audio_data)
self.debug(f"Saved processed audio to {save_path}")
self.processing_complete.emit(save_path)
return save_path
except Exception as e:
self.debug(f"Failed to save processed audio: {e}")
return None
def concatenate_audio_files(self, file_paths, output_path=None):
"""Concatenate multiple audio files"""
if not file_paths:
return None
combined_data = b''
sample_rate = None
for file_path in file_paths:
try:
with wave.open(file_path, 'rb') as wav_file:
if sample_rate is None:
sample_rate = wav_file.getframerate()
elif wav_file.getframerate() != sample_rate:
self.debug(f"Sample rate mismatch in {file_path}")
continue
data = wav_file.readframes(wav_file.getnframes())
combined_data += data
except Exception as e:
self.debug(f"Failed to read {file_path}: {e}")
if not combined_data:
return None
# Save concatenated audio
if not output_path:
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
output_path = f"wav/concatenated_{timestamp}.wav"
os.makedirs(os.path.dirname(output_path), exist_ok=True)
try:
with wave.open(output_path, 'wb') as wav_file:
wav_file.setnchannels(1)
wav_file.setsampwidth(2)
wav_file.setframerate(sample_rate or 8000)
wav_file.writeframes(combined_data)
self.debug(f"Saved concatenated audio to {output_path}")
return output_path
except Exception as e:
self.debug(f"Failed to save concatenated audio: {e}")
return None
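A standalone sketch (synthetic data, not project code) of the dB arithmetic that apply_gain() and normalize_audio() above rely on:

import numpy as np

rate = 8000
t = np.arange(rate) / rate                                            # one second of samples
tone = (0.25 * 32767 * np.sin(2 * np.pi * 440 * t)).astype(np.int16)  # roughly -12 dBFS sine

gain_db = 6.0
gain_linear = 10 ** (gain_db / 20.0)                                  # +6 dB is roughly x2 amplitude
boosted = np.clip(tone.astype(np.float32) * gain_linear, -32768, 32767).astype(np.int16)

peak_dbfs = 20 * np.log10(np.max(np.abs(boosted)) / 32768.0)
print(f"peak after gain: {peak_dbfs:.1f} dBFS")                       # about -6 dBFS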

View File

@ -1,79 +0,0 @@
# client_state.py
from queue import Queue
from session import NoiseXKSession
import time
class ClientState:
def __init__(self, client_id):
self.client_id = client_id
self.command_queue = Queue()
self.initiator = None
self.keypair = None
self.peer_pubkey = None
self.session = None
self.handshake_in_progress = False
self.handshake_start_time = None
self.call_active = False
def process_command(self, client):
"""Process commands from the queue."""
if not self.command_queue.empty():
print(f"Client {self.client_id} processing command queue, size: {self.command_queue.qsize()}")
command = self.command_queue.get()
if command == "handshake":
try:
print(f"Client {self.client_id} starting handshake, initiator: {self.initiator}")
self.session = NoiseXKSession(self.keypair, self.peer_pubkey)
self.session.handshake(client.sock, self.initiator)
print(f"Client {self.client_id} handshake complete")
client.send("HANDSHAKE_DONE")
except Exception as e:
print(f"Client {self.client_id} handshake failed: {e}")
client.state_changed.emit("CALL_END", "", self.client_id)
finally:
self.handshake_in_progress = False
self.handshake_start_time = None
def start_handshake(self, initiator, keypair, peer_pubkey):
"""Queue handshake command."""
self.initiator = initiator
self.keypair = keypair
self.peer_pubkey = peer_pubkey
print(f"Client {self.client_id} queuing handshake, initiator: {initiator}")
self.handshake_in_progress = True
self.handshake_start_time = time.time()
self.command_queue.put("handshake")
def handle_data(self, client, data):
"""Handle received data (control or audio)."""
try:
decoded_data = data.decode('utf-8').strip()
print(f"Client {self.client_id} received raw: {decoded_data}")
if decoded_data in ["RINGING", "CALL_END", "CALL_DROPPED", "IN_CALL", "HANDSHAKE", "HANDSHAKE_DONE"]:
client.state_changed.emit(decoded_data, decoded_data, self.client_id)
if decoded_data == "HANDSHAKE":
self.handshake_in_progress = True
elif decoded_data == "HANDSHAKE_DONE":
self.call_active = True
else:
print(f"Client {self.client_id} ignored unexpected text message: {decoded_data}")
except UnicodeDecodeError:
if self.call_active and self.session:
try:
print(f"Client {self.client_id} received audio packet, length={len(data)}")
decrypted_data = self.session.decrypt(data)
print(f"Client {self.client_id} decrypted audio packet, length={len(decrypted_data)}")
client.data_received.emit(decrypted_data, self.client_id)
except Exception as e:
print(f"Client {self.client_id} failed to process audio packet: {e}")
else:
print(f"Client {self.client_id} ignored non-text message: {data.hex()}")
def check_handshake_timeout(self, client):
"""Check for handshake timeout."""
if self.handshake_in_progress and self.handshake_start_time:
if time.time() - self.handshake_start_time > 30:
print(f"Client {self.client_id} handshake timeout after 30s")
client.state_changed.emit("CALL_END", "", self.client_id)
self.handshake_in_progress = False
self.handshake_start_time = None

View File

@ -0,0 +1,275 @@
from PyQt5.QtWidgets import (
QDialog, QVBoxLayout, QHBoxLayout, QLabel, QSlider, QSpinBox,
QPushButton, QGroupBox, QGridLayout, QComboBox, QCheckBox,
QDialogButtonBox
)
from PyQt5.QtCore import Qt, pyqtSignal
from PyQt5.QtGui import QFont
class GSMSettingsDialog(QDialog):
settings_changed = pyqtSignal(dict)
def __init__(self, parent=None):
super().__init__(parent)
self.setWindowTitle("GSM Simulation Settings")
self.setModal(True)
self.setMinimumWidth(500)
# Default settings
self.settings = {
'signal_strength': -70, # dBm
'signal_quality': 75, # percentage
'noise_level': 10, # percentage
'codec_mode': 'AMR-NB',
'bitrate': 12.2, # kbps
'packet_loss': 0, # percentage
'jitter': 20, # ms
'latency': 100, # ms
'fading_enabled': False,
'fading_speed': 'slow',
'interference_enabled': False,
'handover_enabled': False
}
self.init_ui()
def init_ui(self):
layout = QVBoxLayout()
# Title
title = QLabel("GSM Network Simulation Parameters")
title.setFont(QFont("Arial", 14, QFont.Bold))
title.setAlignment(Qt.AlignCenter)
layout.addWidget(title)
# Signal Quality Group
signal_group = QGroupBox("Signal Quality")
signal_layout = QGridLayout()
# Signal Strength
signal_layout.addWidget(QLabel("Signal Strength (dBm):"), 0, 0)
self.signal_strength_slider = QSlider(Qt.Horizontal)
self.signal_strength_slider.setRange(-120, -40)
self.signal_strength_slider.setValue(self.settings['signal_strength'])
self.signal_strength_slider.setTickPosition(QSlider.TicksBelow)
self.signal_strength_slider.setTickInterval(10)
signal_layout.addWidget(self.signal_strength_slider, 0, 1)
self.signal_strength_label = QLabel(f"{self.settings['signal_strength']} dBm")
signal_layout.addWidget(self.signal_strength_label, 0, 2)
# Signal Quality
signal_layout.addWidget(QLabel("Signal Quality (%):"), 1, 0)
self.signal_quality_slider = QSlider(Qt.Horizontal)
self.signal_quality_slider.setRange(0, 100)
self.signal_quality_slider.setValue(self.settings['signal_quality'])
self.signal_quality_slider.setTickPosition(QSlider.TicksBelow)
self.signal_quality_slider.setTickInterval(10)
signal_layout.addWidget(self.signal_quality_slider, 1, 1)
self.signal_quality_label = QLabel(f"{self.settings['signal_quality']}%")
signal_layout.addWidget(self.signal_quality_label, 1, 2)
# Noise Level
signal_layout.addWidget(QLabel("Noise Level (%):"), 2, 0)
self.noise_slider = QSlider(Qt.Horizontal)
self.noise_slider.setRange(0, 50)
self.noise_slider.setValue(self.settings['noise_level'])
self.noise_slider.setTickPosition(QSlider.TicksBelow)
self.noise_slider.setTickInterval(5)
signal_layout.addWidget(self.noise_slider, 2, 1)
self.noise_label = QLabel(f"{self.settings['noise_level']}%")
signal_layout.addWidget(self.noise_label, 2, 2)
signal_group.setLayout(signal_layout)
layout.addWidget(signal_group)
# Codec Settings Group
codec_group = QGroupBox("Voice Codec Settings")
codec_layout = QGridLayout()
# Codec Type
codec_layout.addWidget(QLabel("Codec Type:"), 0, 0)
self.codec_combo = QComboBox()
self.codec_combo.addItems(['AMR-NB', 'AMR-WB', 'EVS', 'GSM-FR', 'GSM-EFR'])
self.codec_combo.setCurrentText(self.settings['codec_mode'])
codec_layout.addWidget(self.codec_combo, 0, 1)
# Bitrate
codec_layout.addWidget(QLabel("Bitrate (kbps):"), 1, 0)
self.bitrate_spin = QSpinBox()
self.bitrate_spin.setRange(4, 24)
self.bitrate_spin.setSingleStep(1)
self.bitrate_spin.setValue(int(self.settings['bitrate']))
self.bitrate_spin.setSuffix(" kbps")
codec_layout.addWidget(self.bitrate_spin, 1, 1)
codec_group.setLayout(codec_layout)
layout.addWidget(codec_group)
# Network Conditions Group
network_group = QGroupBox("Network Conditions")
network_layout = QGridLayout()
# Packet Loss
network_layout.addWidget(QLabel("Packet Loss (%):"), 0, 0)
self.packet_loss_spin = QSpinBox()
self.packet_loss_spin.setRange(0, 20)
self.packet_loss_spin.setValue(self.settings['packet_loss'])
self.packet_loss_spin.setSuffix("%")
network_layout.addWidget(self.packet_loss_spin, 0, 1)
# Jitter
network_layout.addWidget(QLabel("Jitter (ms):"), 1, 0)
self.jitter_spin = QSpinBox()
self.jitter_spin.setRange(0, 200)
self.jitter_spin.setValue(self.settings['jitter'])
self.jitter_spin.setSuffix(" ms")
network_layout.addWidget(self.jitter_spin, 1, 1)
# Latency
network_layout.addWidget(QLabel("Latency (ms):"), 2, 0)
self.latency_spin = QSpinBox()
self.latency_spin.setRange(20, 500)
self.latency_spin.setValue(self.settings['latency'])
self.latency_spin.setSuffix(" ms")
network_layout.addWidget(self.latency_spin, 2, 1)
network_group.setLayout(network_layout)
layout.addWidget(network_group)
# Advanced Features Group
advanced_group = QGroupBox("Advanced Features")
advanced_layout = QGridLayout()
# Fading
self.fading_check = QCheckBox("Enable Fading")
self.fading_check.setChecked(self.settings['fading_enabled'])
advanced_layout.addWidget(self.fading_check, 0, 0)
self.fading_combo = QComboBox()
self.fading_combo.addItems(['slow', 'medium', 'fast'])
self.fading_combo.setCurrentText(self.settings['fading_speed'])
self.fading_combo.setEnabled(self.settings['fading_enabled'])
advanced_layout.addWidget(self.fading_combo, 0, 1)
# Interference
self.interference_check = QCheckBox("Enable Interference")
self.interference_check.setChecked(self.settings['interference_enabled'])
advanced_layout.addWidget(self.interference_check, 1, 0)
# Handover
self.handover_check = QCheckBox("Enable Handover Simulation")
self.handover_check.setChecked(self.settings['handover_enabled'])
advanced_layout.addWidget(self.handover_check, 2, 0)
advanced_group.setLayout(advanced_layout)
layout.addWidget(advanced_group)
# Preset buttons
preset_layout = QHBoxLayout()
preset_layout.addWidget(QLabel("Presets:"))
excellent_btn = QPushButton("Excellent")
excellent_btn.clicked.connect(self.set_excellent_preset)
preset_layout.addWidget(excellent_btn)
good_btn = QPushButton("Good")
good_btn.clicked.connect(self.set_good_preset)
preset_layout.addWidget(good_btn)
fair_btn = QPushButton("Fair")
fair_btn.clicked.connect(self.set_fair_preset)
preset_layout.addWidget(fair_btn)
poor_btn = QPushButton("Poor")
poor_btn.clicked.connect(self.set_poor_preset)
preset_layout.addWidget(poor_btn)
layout.addLayout(preset_layout)
# Dialog buttons
buttons = QDialogButtonBox(
QDialogButtonBox.Ok | QDialogButtonBox.Cancel,
Qt.Horizontal, self
)
buttons.accepted.connect(self.accept)
buttons.rejected.connect(self.reject)
layout.addWidget(buttons)
self.setLayout(layout)
# Connect signals
self.signal_strength_slider.valueChanged.connect(
lambda v: self.signal_strength_label.setText(f"{v} dBm")
)
self.signal_quality_slider.valueChanged.connect(
lambda v: self.signal_quality_label.setText(f"{v}%")
)
self.noise_slider.valueChanged.connect(
lambda v: self.noise_label.setText(f"{v}%")
)
self.fading_check.toggled.connect(self.fading_combo.setEnabled)
def get_settings(self):
"""Get current settings"""
self.settings['signal_strength'] = self.signal_strength_slider.value()
self.settings['signal_quality'] = self.signal_quality_slider.value()
self.settings['noise_level'] = self.noise_slider.value()
self.settings['codec_mode'] = self.codec_combo.currentText()
self.settings['bitrate'] = self.bitrate_spin.value()
self.settings['packet_loss'] = self.packet_loss_spin.value()
self.settings['jitter'] = self.jitter_spin.value()
self.settings['latency'] = self.latency_spin.value()
self.settings['fading_enabled'] = self.fading_check.isChecked()
self.settings['fading_speed'] = self.fading_combo.currentText()
self.settings['interference_enabled'] = self.interference_check.isChecked()
self.settings['handover_enabled'] = self.handover_check.isChecked()
return self.settings
def set_excellent_preset(self):
"""Set excellent signal conditions"""
self.signal_strength_slider.setValue(-50)
self.signal_quality_slider.setValue(95)
self.noise_slider.setValue(5)
self.packet_loss_spin.setValue(0)
self.jitter_spin.setValue(10)
self.latency_spin.setValue(50)
self.fading_check.setChecked(False)
self.interference_check.setChecked(False)
def set_good_preset(self):
"""Set good signal conditions"""
self.signal_strength_slider.setValue(-70)
self.signal_quality_slider.setValue(75)
self.noise_slider.setValue(10)
self.packet_loss_spin.setValue(1)
self.jitter_spin.setValue(20)
self.latency_spin.setValue(100)
self.fading_check.setChecked(False)
self.interference_check.setChecked(False)
def set_fair_preset(self):
"""Set fair signal conditions"""
self.signal_strength_slider.setValue(-85)
self.signal_quality_slider.setValue(50)
self.noise_slider.setValue(20)
self.packet_loss_spin.setValue(3)
self.jitter_spin.setValue(50)
self.latency_spin.setValue(150)
self.fading_check.setChecked(True)
self.fading_combo.setCurrentText('medium')
self.interference_check.setChecked(False)
def set_poor_preset(self):
"""Set poor signal conditions"""
self.signal_strength_slider.setValue(-100)
self.signal_quality_slider.setValue(25)
self.noise_slider.setValue(35)
self.packet_loss_spin.setValue(8)
self.jitter_spin.setValue(100)
self.latency_spin.setValue(300)
self.fading_check.setChecked(True)
self.fading_combo.setCurrentText('fast')
self.interference_check.setChecked(True)
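A minimal usage sketch, assuming a QApplication is already running and that the caller also holds the GSMStatusWidget defined in the next file of this diff; the dialog API (exec_, get_settings) is as shown above:

from PyQt5.QtWidgets import QDialog

def edit_gsm_settings(parent, status_widget):
    dlg = GSMSettingsDialog(parent)
    if dlg.exec_() == QDialog.Accepted:
        settings = dlg.get_settings()            # dict of signal/codec/network values
        status_widget.update_settings(settings)  # refresh the status panel
        return settings
    return None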

View File

@ -0,0 +1,330 @@
from PyQt5.QtWidgets import (
QWidget, QVBoxLayout, QHBoxLayout, QLabel, QFrame,
QGridLayout, QProgressBar
)
from PyQt5.QtCore import Qt, pyqtSignal, QTimer
from PyQt5.QtGui import QFont, QPalette, QColor, QPainter, QBrush, QLinearGradient
import math
class SignalStrengthWidget(QWidget):
"""Custom widget to display signal strength bars"""
def __init__(self, parent=None):
super().__init__(parent)
self.signal_strength = -70 # dBm
self.setMinimumSize(60, 40)
self.setMaximumSize(80, 50)
def set_signal_strength(self, dbm):
self.signal_strength = dbm
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.setRenderHint(QPainter.Antialiasing)
# Calculate number of bars based on signal strength
# -50 dBm or better = 5 bars
# -60 dBm = 4 bars
# -70 dBm = 3 bars
# -80 dBm = 2 bars
# -90 dBm = 1 bar
# < -90 dBm = 0 bars
if self.signal_strength >= -50:
active_bars = 5
elif self.signal_strength >= -60:
active_bars = 4
elif self.signal_strength >= -70:
active_bars = 3
elif self.signal_strength >= -80:
active_bars = 2
elif self.signal_strength >= -90:
active_bars = 1
else:
active_bars = 0
bar_width = 10
bar_spacing = 3
max_height = 35
for i in range(5):
x = i * (bar_width + bar_spacing) + 5
bar_height = (i + 1) * 7
y = max_height - bar_height + 10
if i < active_bars:
# Active bar - gradient from green to yellow to red based on strength
if self.signal_strength >= -60:
color = QColor(76, 175, 80) # Green
elif self.signal_strength >= -75:
color = QColor(255, 193, 7) # Amber
else:
color = QColor(244, 67, 54) # Red
else:
# Inactive bar
color = QColor(60, 60, 60)
painter.fillRect(x, y, bar_width, bar_height, color)
class GSMStatusWidget(QFrame):
"""Widget to display GSM network status and parameters"""
def __init__(self, parent=None):
super().__init__(parent)
self.setObjectName("gsmStatusWidget")
self.setFrameStyle(QFrame.StyledPanel)
# Default settings
self.settings = {
'signal_strength': -70,
'signal_quality': 75,
'noise_level': 10,
'codec_mode': 'AMR-NB',
'bitrate': 12.2,
'packet_loss': 0,
'jitter': 20,
'latency': 100,
'fading_enabled': False,
'fading_speed': 'slow',
'interference_enabled': False,
'handover_enabled': False
}
self.init_ui()
self.update_display()
def init_ui(self):
"""Initialize the UI components"""
main_layout = QVBoxLayout()
main_layout.setSpacing(10)
main_layout.setContentsMargins(15, 15, 15, 15)
# Title
title_label = QLabel("📡 GSM Network Status")
title_label.setObjectName("gsmStatusTitle")
title_label.setAlignment(Qt.AlignCenter)
main_layout.addWidget(title_label)
# Main status grid
status_grid = QGridLayout()
status_grid.setSpacing(12)
# Row 0: Signal strength visualization
status_grid.addWidget(QLabel("Signal:"), 0, 0)
self.signal_widget = SignalStrengthWidget()
status_grid.addWidget(self.signal_widget, 0, 1)
self.signal_dbm_label = QLabel("-70 dBm")
self.signal_dbm_label.setObjectName("signalDbmLabel")
status_grid.addWidget(self.signal_dbm_label, 0, 2)
# Row 1: Signal quality bar
status_grid.addWidget(QLabel("Quality:"), 1, 0)
self.quality_bar = QProgressBar()
self.quality_bar.setTextVisible(True)
self.quality_bar.setRange(0, 100)
self.quality_bar.setObjectName("qualityBar")
status_grid.addWidget(self.quality_bar, 1, 1, 1, 2)
# Row 2: Noise level
status_grid.addWidget(QLabel("Noise:"), 2, 0)
self.noise_bar = QProgressBar()
self.noise_bar.setTextVisible(True)
self.noise_bar.setRange(0, 50)
self.noise_bar.setObjectName("noiseBar")
status_grid.addWidget(self.noise_bar, 2, 1, 1, 2)
main_layout.addLayout(status_grid)
# Separator
separator1 = QFrame()
separator1.setFrameShape(QFrame.HLine)
separator1.setObjectName("gsmSeparator")
main_layout.addWidget(separator1)
# Codec info section
codec_layout = QHBoxLayout()
codec_layout.setSpacing(15)
codec_icon = QLabel("🎤")
codec_layout.addWidget(codec_icon)
self.codec_label = QLabel("AMR-NB @ 12.2 kbps")
self.codec_label.setObjectName("codecLabel")
codec_layout.addWidget(self.codec_label)
codec_layout.addStretch()
main_layout.addLayout(codec_layout)
# Network conditions section
network_grid = QGridLayout()
network_grid.setSpacing(8)
# Packet loss indicator
network_grid.addWidget(QLabel("📉"), 0, 0)
self.packet_loss_label = QLabel("Loss: 0%")
self.packet_loss_label.setObjectName("networkParam")
network_grid.addWidget(self.packet_loss_label, 0, 1)
# Jitter indicator
network_grid.addWidget(QLabel("📊"), 0, 2)
self.jitter_label = QLabel("Jitter: 20ms")
self.jitter_label.setObjectName("networkParam")
network_grid.addWidget(self.jitter_label, 0, 3)
# Latency indicator
network_grid.addWidget(QLabel(""), 1, 0)
self.latency_label = QLabel("Latency: 100ms")
self.latency_label.setObjectName("networkParam")
network_grid.addWidget(self.latency_label, 1, 1)
# Features status
network_grid.addWidget(QLabel("🌊"), 1, 2)
self.features_label = QLabel("Standard")
self.features_label.setObjectName("networkParam")
network_grid.addWidget(self.features_label, 1, 3)
main_layout.addLayout(network_grid)
# Connection status
separator2 = QFrame()
separator2.setFrameShape(QFrame.HLine)
separator2.setObjectName("gsmSeparator")
main_layout.addWidget(separator2)
self.connection_status = QLabel("🟢 Connected to GSM Network")
self.connection_status.setObjectName("connectionStatus")
self.connection_status.setAlignment(Qt.AlignCenter)
main_layout.addWidget(self.connection_status)
main_layout.addStretch()
self.setLayout(main_layout)
# Apply styling
self.setStyleSheet("""
#gsmStatusWidget {
background-color: #2A2A2A;
border: 2px solid #0078D4;
border-radius: 10px;
}
#gsmStatusTitle {
font-size: 16px;
font-weight: bold;
color: #00A2E8;
padding: 5px;
}
#signalDbmLabel {
font-size: 14px;
font-weight: bold;
color: #4CAF50;
}
#qualityBar {
background-color: #1E1E1E;
border: 1px solid #444;
border-radius: 3px;
text-align: center;
color: white;
}
#qualityBar::chunk {
background-color: qlineargradient(x1:0, y1:0, x2:1, y2:0,
stop:0 #4CAF50, stop:0.5 #8BC34A, stop:1 #CDDC39);
border-radius: 3px;
}
#noiseBar {
background-color: #1E1E1E;
border: 1px solid #444;
border-radius: 3px;
text-align: center;
color: white;
}
#noiseBar::chunk {
background-color: qlineargradient(x1:0, y1:0, x2:1, y2:0,
stop:0 #FFC107, stop:0.5 #FF9800, stop:1 #FF5722);
border-radius: 3px;
}
#codecLabel {
font-size: 13px;
color: #E0E0E0;
font-weight: bold;
}
#networkParam {
font-size: 12px;
color: #B0B0B0;
}
#gsmSeparator {
background-color: #444;
max-height: 1px;
margin: 5px 0;
}
#connectionStatus {
font-size: 13px;
color: #4CAF50;
font-weight: bold;
padding: 5px;
}
QLabel {
color: #E0E0E0;
}
""")
def update_settings(self, settings):
"""Update the displayed settings"""
self.settings = settings
self.update_display()
def update_display(self):
"""Update all display elements based on current settings"""
# Update signal strength
self.signal_widget.set_signal_strength(self.settings['signal_strength'])
self.signal_dbm_label.setText(f"{self.settings['signal_strength']} dBm")
# Color code the dBm label
if self.settings['signal_strength'] >= -60:
self.signal_dbm_label.setStyleSheet("color: #4CAF50;") # Green
elif self.settings['signal_strength'] >= -75:
self.signal_dbm_label.setStyleSheet("color: #FFC107;") # Amber
else:
self.signal_dbm_label.setStyleSheet("color: #FF5722;") # Red
# Update quality bar
self.quality_bar.setValue(self.settings['signal_quality'])
self.quality_bar.setFormat(f"{self.settings['signal_quality']}%")
# Update noise bar
self.noise_bar.setValue(self.settings['noise_level'])
self.noise_bar.setFormat(f"{self.settings['noise_level']}%")
# Update codec info
self.codec_label.setText(f"{self.settings['codec_mode']} @ {self.settings['bitrate']} kbps")
# Update network parameters
self.packet_loss_label.setText(f"Loss: {self.settings['packet_loss']}%")
self.jitter_label.setText(f"Jitter: {self.settings['jitter']}ms")
self.latency_label.setText(f"Latency: {self.settings['latency']}ms")
# Update features
features = []
if self.settings['fading_enabled']:
features.append(f"Fading({self.settings['fading_speed']})")
if self.settings['interference_enabled']:
features.append("Interference")
if self.settings['handover_enabled']:
features.append("Handover")
if features:
self.features_label.setText(", ".join(features))
else:
self.features_label.setText("Standard")
# Update connection status based on signal quality
if self.settings['signal_quality'] >= 80:
self.connection_status.setText("🟢 Excellent Connection")
self.connection_status.setStyleSheet("color: #4CAF50;")
elif self.settings['signal_quality'] >= 60:
self.connection_status.setText("🟡 Good Connection")
self.connection_status.setStyleSheet("color: #FFC107;")
elif self.settings['signal_quality'] >= 40:
self.connection_status.setText("🟠 Fair Connection")
self.connection_status.setStyleSheet("color: #FF9800;")
else:
self.connection_status.setText("🔴 Poor Connection")
self.connection_status.setStyleSheet("color: #FF5722;")
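The paintEvent above maps dBm to 0-5 bars; a tiny standalone helper mirroring those thresholds (illustration only):

def bars_for_dbm(dbm: int) -> int:
    # Thresholds copied from SignalStrengthWidget.paintEvent
    for limit, bars in [(-50, 5), (-60, 4), (-70, 3), (-80, 2), (-90, 1)]:
        if dbm >= limit:
            return bars
    return 0

assert bars_for_dbm(-45) == 5 and bars_for_dbm(-70) == 3 and bars_for_dbm(-95) == 0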

File diff suppressed because it is too large

View File

@ -0,0 +1,127 @@
"""Wrapper for Noise XK handshake over GSM simulator"""
import struct
from dissononce.processing.impl.handshakestate import HandshakeState
from dissononce.processing.impl.symmetricstate import SymmetricState
from dissononce.processing.impl.cipherstate import CipherState
from dissononce.processing.handshakepatterns.interactive.XK import XKHandshakePattern
from dissononce.cipher.chachapoly import ChaChaPolyCipher
from dissononce.dh.x25519.x25519 import X25519DH
from dissononce.dh.keypair import KeyPair
from dissononce.dh.x25519.public import PublicKey
from dissononce.hash.sha256 import SHA256Hash
class NoiseXKWrapper:
"""Wrapper for Noise XK that works over message-passing instead of direct sockets"""
def __init__(self, keypair, peer_pubkey, debug_callback=None):
self.keypair = keypair
self.peer_pubkey = peer_pubkey
self.debug = debug_callback or print
# Build handshake state
cipher = ChaChaPolyCipher()
dh = X25519DH()
hshash = SHA256Hash()
symmetric = SymmetricState(CipherState(cipher), hshash)
self._hs = HandshakeState(symmetric, dh)
self._send_cs = None
self._recv_cs = None
self.handshake_complete = False
self.is_initiator = None # Track initiator status
# Message buffers
self.outgoing_messages = []
self.incoming_messages = []
def start_handshake(self, initiator):
"""Start the handshake process"""
self.debug(f"Starting Noise XK handshake as {'initiator' if initiator else 'responder'}")
self.is_initiator = initiator # Store initiator status
if initiator:
# Initiator knows peer's static out-of-band
self._hs.initialize(
XKHandshakePattern(),
True,
b'',
s=self.keypair,
rs=self.peer_pubkey
)
# Generate first message
buf = bytearray()
self._hs.write_message(b'', buf)
self.outgoing_messages.append(bytes(buf))
self.debug(f"Generated handshake message 1: {len(buf)} bytes")
else:
# Responder doesn't know peer's static yet
self._hs.initialize(
XKHandshakePattern(),
False,
b'',
s=self.keypair
)
self.debug("Responder initialized, waiting for first message")
def process_handshake_message(self, data):
"""Process incoming handshake message and generate response if needed"""
self.debug(f"Processing handshake message: {len(data)} bytes")
try:
# Read the message
payload = bytearray()
cs_pair = self._hs.read_message(data, payload)
# Check if we need to send a response
if not cs_pair:
# More messages needed
buf = bytearray()
cs_pair = self._hs.write_message(b'', buf)
self.outgoing_messages.append(bytes(buf))
self.debug(f"Generated handshake response: {len(buf)} bytes")
# Check if handshake completed after writing (for initiator)
if cs_pair:
self._complete_handshake(cs_pair)
else:
# Handshake complete after reading (for responder)
self._complete_handshake(cs_pair)
except Exception as e:
self.debug(f"Handshake error: {e}")
raise
def get_next_handshake_message(self):
"""Get next outgoing handshake message"""
if self.outgoing_messages:
return self.outgoing_messages.pop(0)
return None
def encrypt(self, plaintext):
"""Encrypt a message"""
if not self.handshake_complete:
raise RuntimeError("Handshake not complete")
return self._send_cs.encrypt_with_ad(b'', plaintext)
def decrypt(self, ciphertext):
"""Decrypt a message"""
if not self.handshake_complete:
raise RuntimeError("Handshake not complete")
return self._recv_cs.decrypt_with_ad(b'', ciphertext)
def _complete_handshake(self, cs_pair):
"""Complete the handshake with the given cipher states"""
self.debug("Handshake complete, setting up cipher states")
cs0, cs1 = cs_pair
# Use stored initiator status
if self.is_initiator:
self._send_cs, self._recv_cs = cs0, cs1
self.debug("Set up cipher states as initiator")
else:
self._send_cs, self._recv_cs = cs1, cs0
self.debug("Set up cipher states as responder")
self.handshake_complete = True
self.debug("Cipher states established")

View File

@ -1,110 +0,0 @@
import socket
import time
import select
from PyQt5.QtCore import QThread, pyqtSignal
from client_state import ClientState
class PhoneClient(QThread):
data_received = pyqtSignal(bytes, int)
state_changed = pyqtSignal(str, str, int)
def __init__(self, client_id):
super().__init__()
self.host = "localhost"
self.port = 12345
self.client_id = client_id
self.sock = None
self.running = True
self.state = ClientState(client_id)
def connect_socket(self):
retries = 3
for attempt in range(retries):
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.sock.settimeout(120)
self.sock.connect((self.host, self.port))
print(f"Client {self.client_id} connected to {self.host}:{self.port}")
return True
except Exception as e:
print(f"Client {self.client_id} connection attempt {attempt + 1} failed: {e}")
if attempt < retries - 1:
time.sleep(1)
self.sock = None
return False
def run(self):
while self.running:
if not self.sock:
if not self.connect_socket():
print(f"Client {self.client_id} failed to connect after retries")
self.state_changed.emit("CALL_END", "", self.client_id)
break
try:
while self.running:
self.state.process_command(self)
self.state.check_handshake_timeout(self)
if not self.state.handshake_in_progress:
if self.sock is None:
print(f"Client {self.client_id} socket is None, exiting inner loop")
break
readable, _, _ = select.select([self.sock], [], [], 0.01)
if readable:
try:
if self.sock is None:
print(f"Client {self.client_id} socket is None before recv, exiting")
break
data = self.sock.recv(1024)
if not data:
print(f"Client {self.client_id} disconnected")
self.state_changed.emit("CALL_END", "", self.client_id)
break
self.state.handle_data(self, data)
except socket.error as e:
print(f"Client {self.client_id} socket error: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
break
else:
self.msleep(20)
print(f"Client {self.client_id} yielding during handshake")
self.msleep(1)
except Exception as e:
print(f"Client {self.client_id} unexpected error in run loop: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
break
finally:
if self.sock:
try:
self.sock.close()
except Exception as e:
print(f"Client {self.client_id} error closing socket: {e}")
self.sock = None
def send(self, message):
if self.sock and self.running:
try:
if isinstance(message, str):
data = message.encode('utf-8')
self.sock.send(data)
print(f"Client {self.client_id} sent: {message}, length={len(data)}")
else:
self.sock.send(message)
print(f"Client {self.client_id} sent binary data, length={len(message)}")
except socket.error as e:
print(f"Client {self.client_id} send error: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
def stop(self):
self.running = False
if self.sock:
try:
self.sock.close()
except Exception as e:
print(f"Client {self.client_id} error closing socket in stop: {e}")
self.sock = None
self.quit()
self.wait(1000)
def start_handshake(self, initiator, keypair, peer_pubkey):
self.state.start_handshake(initiator, keypair, peer_pubkey)
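The removed client above read raw 1024-byte chunks with no framing; the ProtocolPhoneClient later in this diff instead parses a 4-byte big-endian length prefix. A sketch of that framing (the frame() helper for the sending side is an assumption mirroring the receive-side parsing):

import struct

def frame(payload: bytes) -> bytes:
    # Assumed sender-side counterpart of the length-prefixed parsing in ProtocolPhoneClient.run()
    return struct.pack('>I', len(payload)) + payload

def deframe(buffer: bytearray):
    # Yield complete payloads, leaving any partial message in the buffer
    while len(buffer) >= 4:
        msg_len = struct.unpack('>I', buffer[:4])[0]
        if len(buffer) < 4 + msg_len:
            break                                  # wait for more data
        yield bytes(buffer[4:4 + msg_len])
        del buffer[:4 + msg_len]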

View File

@ -1,17 +1,37 @@
import secrets
from PyQt5.QtCore import QTimer
from phone_client import PhoneClient
from protocol_phone_client import ProtocolPhoneClient
from session import NoiseXKSession
from phone_state import PhoneState # Added import
from audio_player import AudioPlayer
from audio_processor import AudioProcessor
import struct
import wave
import os
class PhoneManager:
def __init__(self):
self.phones = []
self.handshake_done_count = 0
self.ui = None # Will be set by UI
self.audio_player = AudioPlayer()
self.audio_player.set_debug_callback(self.debug)
self.audio_processor = AudioProcessor()
self.audio_processor.set_debug_callback(self.debug)
self.audio_buffer = {} # client_id -> list of audio chunks for processing
def debug(self, message):
"""Send debug message to UI if available"""
if self.ui and hasattr(self.ui, 'debug'):
self.ui.debug(f"[PhoneManager] {message}")
else:
print(f"[PhoneManager] {message}")
def initialize_phones(self):
for i in range(2):
client = PhoneClient(i)
client = ProtocolPhoneClient(i) # Use protocol client
client.set_debug_callback(self.debug) # Set debug callback
client.manager = self # Set manager reference for handshake lookup
keypair = NoiseXKSession.generate_keypair()
phone = {
'id': i,
@ -21,9 +41,15 @@ class PhoneManager:
'audio_timer': None,
'keypair': keypair,
'public_key': keypair.public,
'is_initiator': False
'is_initiator': False,
'audio_file': None, # For test audio
'frame_counter': 0,
'playback_enabled': False,
'recording_enabled': False
}
client.keypair = keypair # Also set keypair on client
self.phones.append(phone)
self.debug(f"Initialized Phone {i+1} with public key: {keypair.public.data.hex()[:32]}...")
self.phones[0]['peer_public_key'] = self.phones[1]['public_key']
self.phones[1]['peer_public_key'] = self.phones[0]['public_key']
@ -31,16 +57,19 @@ class PhoneManager:
def phone_action(self, phone_id, ui_manager):
phone = self.phones[phone_id]
other_phone = self.phones[1 - phone_id]
print(f"Phone {phone_id + 1} Action, current state: {phone['state']}, is_initiator: {phone['is_initiator']}")
self.debug(f"Phone {phone_id + 1} action triggered, current state: {phone['state'].name}")
if phone['state'] == PhoneState.IDLE:
self.debug(f"Phone {phone_id + 1} initiating call to Phone {2-phone_id}")
phone['state'] = PhoneState.CALLING
other_phone['state'] = PhoneState.RINGING
phone['is_initiator'] = True
other_phone['is_initiator'] = False
phone['client'].send("RINGING")
elif phone['state'] == PhoneState.RINGING:
phone['state'] = other_phone['state'] = PhoneState.IN_CALL
self.debug(f"Phone {phone_id + 1} answering call from Phone {2-phone_id}")
phone['state'] = PhoneState.IN_CALL
# Don't set other_phone's state here - it will be set when it receives IN_CALL
phone['client'].send("IN_CALL")
elif phone['state'] in [PhoneState.IN_CALL, PhoneState.CALLING]:
if not phone['client'].state.handshake_in_progress and phone['state'] != PhoneState.CALLING:
@ -49,40 +78,289 @@ class PhoneManager:
for p in [phone, other_phone]:
if p['audio_timer']:
p['audio_timer'].stop()
# End voice session
if p['client'].voice_active:
p['client'].end_voice_session()
# Close audio file
if p['audio_file']:
p['audio_file'].close()
p['audio_file'] = None
p['frame_counter'] = 0
else:
print(f"Phone {phone_id + 1} cannot hang up during handshake or call setup")
self.debug(f"Phone {phone_id + 1} cannot hang up during handshake or call setup")
ui_manager.update_phone_ui(phone_id)
ui_manager.update_phone_ui(1 - phone_id)
def send_audio(self, phone_id):
phone = self.phones[phone_id]
if phone['state'] == PhoneState.IN_CALL and phone['client'].state.session and phone['client'].sock:
mock_audio = secrets.token_bytes(16)
try:
if phone['state'] != PhoneState.IN_CALL:
self.debug(f"Phone {phone_id + 1} not in call, stopping audio timer")
if phone['audio_timer']:
phone['audio_timer'].stop()
return
if not phone['client'].handshake_complete:
self.debug(f"Phone {phone_id + 1} handshake not complete, skipping audio send")
return
if not phone['client'].voice_active:
self.debug(f"Phone {phone_id + 1} voice not active, skipping audio send")
return
if phone['state'] == PhoneState.IN_CALL and phone['client'].handshake_complete and phone['client'].voice_active:
# Load test audio file if not loaded
if phone['audio_file'] is None:
wav_path = "../wav/input.wav"
if not os.path.exists(wav_path):
wav_path = "wav/input.wav"
if os.path.exists(wav_path):
try:
phone['audio_file'] = wave.open(wav_path, 'rb')
self.debug(f"Phone {phone_id + 1} loaded test audio file: {wav_path}")
# Verify it's 8kHz mono
if phone['audio_file'].getframerate() != 8000:
self.debug(f"Warning: {wav_path} is {phone['audio_file'].getframerate()}Hz, expected 8000Hz")
if phone['audio_file'].getnchannels() != 1:
self.debug(f"Warning: {wav_path} has {phone['audio_file'].getnchannels()} channels, expected 1")
# Skip initial silence - jump to 1 second in (8000 samples)
phone['audio_file'].setpos(8000)
self.debug(f"Phone {phone_id + 1} skipped initial silence, starting at 1 second")
except Exception as e:
self.debug(f"Phone {phone_id + 1} failed to load audio: {e}")
# Use mock audio as fallback
phone['audio_file'] = None
# Read audio frame (40ms at 8kHz = 320 samples)
if phone['audio_file']:
try:
frames = phone['audio_file'].readframes(320)
if not frames or len(frames) < 640: # 320 samples * 2 bytes
# Loop back to 1 second (skip silence)
phone['audio_file'].setpos(8000)
frames = phone['audio_file'].readframes(320)
self.debug(f"Phone {phone_id + 1} looped audio back to 1 second mark")
# Send through protocol (codec + 4FSK + encryption)
phone['client'].send_voice_frame(frames)
# Update waveform only every 5 frames to reduce CPU usage
if phone['frame_counter'] % 5 == 0:
if len(frames) >= 2:
self.update_sent_waveform(phone_id, frames)
# If playback is enabled on the sender, play the original audio
if phone['playback_enabled']:
self.audio_player.add_audio_data(phone_id, frames)
if phone['frame_counter'] % 25 == 0:
self.debug(f"Phone {phone_id + 1} playing original audio (sender playback)")
phone['frame_counter'] += 1
if phone['frame_counter'] % 25 == 0: # Log every second
self.debug(f"Phone {phone_id + 1} sent {phone['frame_counter']} voice frames")
except Exception as e:
self.debug(f"Phone {phone_id + 1} audio send error: {e}")
else:
# Fallback: send mock audio
mock_audio = secrets.token_bytes(320)
phone['client'].send_voice_frame(mock_audio)
self.update_sent_waveform(phone_id, mock_audio)
phone['client'].state.session.send(phone['client'].sock, mock_audio)
print(f"Client {phone_id} sent encrypted audio packet, length=32")
except Exception as e:
print(f"Client {phone_id} failed to send audio: {e}")
def start_audio(self, client_id, parent=None):
self.handshake_done_count += 1
print(f"HANDSHAKE_DONE received for client {client_id}, count: {self.handshake_done_count}")
self.debug(f"HANDSHAKE_DONE received for client {client_id}, count: {self.handshake_done_count}")
# Start voice session for this client
phone = self.phones[client_id]
if phone['client'].handshake_complete and not phone['client'].voice_active:
phone['client'].start_voice_session()
if self.handshake_done_count == 2:
for phone in self.phones:
if phone['state'] == PhoneState.IN_CALL:
if not phone['audio_timer'] or not phone['audio_timer'].isActive():
phone['audio_timer'] = QTimer(parent) # Parent to PhoneUI
phone['audio_timer'].timeout.connect(lambda pid=phone['id']: self.send_audio(pid))
phone['audio_timer'].start(100)
# Add a small delay to ensure both sides are ready
def start_audio_timers():
self.debug("Starting audio timers for both phones")
for phone in self.phones:
if phone['state'] == PhoneState.IN_CALL:
if not phone['audio_timer'] or not phone['audio_timer'].isActive():
phone['audio_timer'] = QTimer(parent) # Parent to PhoneUI
phone['audio_timer'].timeout.connect(lambda pid=phone['id']: self.send_audio(pid))
phone['audio_timer'].start(40) # 40ms for each voice frame
# Delay audio start by 500ms to ensure both sides are ready
QTimer.singleShot(500, start_audio_timers)
self.handshake_done_count = 0
def update_waveform(self, client_id, data):
self.phones[client_id]['waveform'].set_data(data)
# Only process actual audio data (should be 640 bytes for 320 samples * 2 bytes)
# Ignore small control messages
if len(data) < 320: # Less than 160 samples (too small for audio)
self.debug(f"Phone {client_id + 1} received non-audio data: {len(data)} bytes (ignoring)")
return
# Debug log audio data reception (only occasionally to avoid spam)
if not hasattr(self, '_audio_frame_count'):
self._audio_frame_count = {}
if client_id not in self._audio_frame_count:
self._audio_frame_count[client_id] = 0
self._audio_frame_count[client_id] += 1
# Update waveform only every 5 frames to reduce CPU usage
if self._audio_frame_count[client_id] % 5 == 0:
self.phones[client_id]['waveform'].set_data(data)
if self._audio_frame_count[client_id] == 1 or self._audio_frame_count[client_id] % 25 == 0:
self.debug(f"Phone {client_id + 1} received audio frame #{self._audio_frame_count[client_id]}: {len(data)} bytes")
# Store audio data in buffer for potential processing
if client_id not in self.audio_buffer:
self.audio_buffer[client_id] = []
self.audio_buffer[client_id].append(data)
# Keep buffer size reasonable (last 30 seconds at 8kHz)
max_chunks = 30 * 25 # 30 seconds * 25 chunks/second
if len(self.audio_buffer[client_id]) > max_chunks:
self.audio_buffer[client_id] = self.audio_buffer[client_id][-max_chunks:]
# Forward audio data to player if playback is enabled
if self.phones[client_id]['playback_enabled']:
if self._audio_frame_count[client_id] == 1:
self.debug(f"Phone {client_id + 1} forwarding audio to player (playback enabled)")
self.audio_player.add_audio_data(client_id, data)
def update_sent_waveform(self, client_id, data):
self.phones[client_id]['sent_waveform'].set_data(data)
def toggle_playback(self, client_id):
"""Toggle audio playback for a phone"""
phone = self.phones[client_id]
if phone['playback_enabled']:
# Stop playback
self.audio_player.stop_playback(client_id)
phone['playback_enabled'] = False
self.debug(f"Phone {client_id + 1} playback stopped")
else:
# Start playback
if self.audio_player.start_playback(client_id):
phone['playback_enabled'] = True
self.debug(f"Phone {client_id + 1} playback started")
# Removed test beep - we want to hear actual audio
else:
self.debug(f"Phone {client_id + 1} failed to start playback")
return phone['playback_enabled']
def toggle_recording(self, client_id):
"""Toggle audio recording for a phone"""
phone = self.phones[client_id]
if phone['recording_enabled']:
# Stop recording and save
save_path = self.audio_player.stop_recording(client_id)
phone['recording_enabled'] = False
if save_path:
self.debug(f"Phone {client_id + 1} recording saved to {save_path}")
return False, save_path
else:
# Start recording
self.audio_player.start_recording(client_id)
phone['recording_enabled'] = True
self.debug(f"Phone {client_id + 1} recording started")
return True, None
def save_received_audio(self, client_id, filename=None):
"""Save the last received audio to a file"""
if client_id not in self.phones:
return None
save_path = self.audio_player.stop_recording(client_id, filename)
if save_path:
self.debug(f"Phone {client_id + 1} audio saved to {save_path}")
return save_path
def process_audio(self, client_id, processing_type, **kwargs):
"""Process buffered audio with specified processing type"""
if client_id not in self.audio_buffer or not self.audio_buffer[client_id]:
self.debug(f"No audio data available for Phone {client_id + 1}")
return None
# Combine all audio chunks
combined_audio = b''.join(self.audio_buffer[client_id])
# Apply processing based on type
processed_audio = combined_audio
if processing_type == "normalize":
target_db = kwargs.get('target_db', -3)
processed_audio = self.audio_processor.normalize_audio(combined_audio, target_db)
elif processing_type == "gain":
gain_db = kwargs.get('gain_db', 0)
processed_audio = self.audio_processor.apply_gain(combined_audio, gain_db)
elif processing_type == "noise_gate":
threshold_db = kwargs.get('threshold_db', -40)
processed_audio = self.audio_processor.apply_noise_gate(combined_audio, threshold_db)
elif processing_type == "low_pass":
cutoff_hz = kwargs.get('cutoff_hz', 3400)
processed_audio = self.audio_processor.apply_low_pass_filter(combined_audio, cutoff_hz)
elif processing_type == "high_pass":
cutoff_hz = kwargs.get('cutoff_hz', 300)
processed_audio = self.audio_processor.apply_high_pass_filter(combined_audio, cutoff_hz)
elif processing_type == "remove_silence":
threshold_db = kwargs.get('threshold_db', -40)
processed_audio = self.audio_processor.remove_silence(combined_audio, threshold_db)
# Save processed audio
save_path = f"wav/phone{client_id + 1}_received.wav"
processed_path = self.audio_processor.save_processed_audio(
processed_audio, save_path, processing_type
)
return processed_path
def export_buffered_audio(self, client_id, filename=None):
"""Export current audio buffer to file"""
if client_id not in self.audio_buffer or not self.audio_buffer[client_id]:
self.debug(f"No audio data available for Phone {client_id + 1}")
return None
# Combine all audio chunks
combined_audio = b''.join(self.audio_buffer[client_id])
# Generate filename if not provided
if not filename:
from datetime import datetime
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"wav/phone{client_id + 1}_buffer_{timestamp}.wav"
# Ensure directory exists
os.makedirs(os.path.dirname(filename), exist_ok=True)
try:
with wave.open(filename, 'wb') as wav_file:
wav_file.setnchannels(1)
wav_file.setsampwidth(2)
wav_file.setframerate(8000)
wav_file.writeframes(combined_audio)
self.debug(f"Exported audio buffer for Phone {client_id + 1} to {filename}")
return filename
except Exception as e:
self.debug(f"Failed to export audio buffer: {e}")
return None
def clear_audio_buffer(self, client_id):
"""Clear audio buffer for a phone"""
if client_id in self.audio_buffer:
self.audio_buffer[client_id] = []
self.debug(f"Cleared audio buffer for Phone {client_id + 1}")
def map_state(self, state_str):
if state_str == "RINGING":
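The send path above works in 40 ms frames at 8 kHz; the constants it relies on, as a quick worked check:

SAMPLE_RATE = 8000                                     # Hz, used throughout this diff
FRAME_MS = 40                                          # audio_timer interval
SAMPLES_PER_FRAME = SAMPLE_RATE * FRAME_MS // 1000     # 320 samples per frame
BYTES_PER_FRAME = SAMPLES_PER_FRAME * 2                # 640 bytes of 16-bit PCM
FRAMES_PER_SECOND = 1000 // FRAME_MS                   # 25, hence the "% 25" logging

assert (SAMPLES_PER_FRAME, BYTES_PER_FRAME, FRAMES_PER_SECOND) == (320, 640, 25)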

View File

@ -1,4 +1,6 @@
class PhoneState:
from enum import Enum
class PhoneState(Enum):
IDLE = 0
CALLING = 1
IN_CALL = 2
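A small illustration of what the Enum conversion above provides (members as shown in this hunk):

from enum import Enum

class PhoneState(Enum):
    IDLE = 0
    CALLING = 1
    IN_CALL = 2

state = PhoneState.IDLE
print(state.name)                    # "IDLE", as used in PhoneManager's debug messages
print(state is PhoneState.CALLING)   # False; identity checks instead of bare ints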

View File

@ -0,0 +1,133 @@
# protocol_client_state.py
from queue import Queue
from session import NoiseXKSession
import time
class ProtocolClientState:
"""Enhanced client state for integrated protocol with voice codec"""
def __init__(self, client_id):
self.client_id = client_id
self.command_queue = Queue()
self.initiator = None
self.keypair = None
self.peer_pubkey = None
self.session = None
self.handshake_in_progress = False
self.handshake_start_time = None
self.call_active = False
self.voice_active = False
self.debug_callback = None
def debug(self, message):
"""Send debug message"""
if self.debug_callback:
self.debug_callback(f"[State{self.client_id+1}] {message}")
else:
print(f"[State{self.client_id+1}] {message}")
def process_command(self, client):
"""Process commands from the queue."""
if not self.command_queue.empty():
self.debug(f"Processing command queue, size: {self.command_queue.qsize()}")
command = self.command_queue.get()
self.debug(f"Processing command: {command}")
if command == "handshake":
# Handshake is now handled by the wrapper in the client
self.debug(f"Handshake command processed")
self.handshake_in_progress = False
self.handshake_start_time = None
elif command == "start_voice":
if client.handshake_complete:
client.start_voice_session()
self.voice_active = True
elif command == "end_voice":
if self.voice_active:
client.end_voice_session()
self.voice_active = False
def start_handshake(self, initiator, keypair, peer_pubkey):
"""Queue handshake command."""
self.initiator = initiator
self.keypair = keypair
self.peer_pubkey = peer_pubkey
self.debug(f"Queuing handshake, initiator: {initiator}")
self.handshake_in_progress = True
self.handshake_start_time = time.time()
self.command_queue.put("handshake")
def handle_data(self, client, data):
"""Handle received data (control or audio)."""
try:
# Try to decode as text first
decoded_data = data.decode('utf-8').strip()
self.debug(f"Received raw: {decoded_data}")
# Handle control messages
if decoded_data in ["RINGING", "CALL_END", "CALL_DROPPED", "IN_CALL", "HANDSHAKE", "HANDSHAKE_DONE"]:
self.debug(f"Emitting state change: {decoded_data}")
# Log which client is receiving what
self.debug(f"Client {self.client_id} received {decoded_data} message")
client.state_changed.emit(decoded_data, decoded_data, self.client_id)
if decoded_data == "IN_CALL":
self.debug(f"Received IN_CALL, setting call_active = True")
self.call_active = True
elif decoded_data == "HANDSHAKE":
self.debug(f"Received HANDSHAKE, setting handshake_in_progress = True")
self.handshake_in_progress = True
elif decoded_data == "HANDSHAKE_DONE":
self.debug(f"Received HANDSHAKE_DONE from peer")
self.call_active = True
# Start voice session on this side too
if client.handshake_complete and not client.voice_active:
self.debug(f"Starting voice session after receiving HANDSHAKE_DONE")
self.command_queue.put("start_voice")
elif decoded_data in ["CALL_END", "CALL_DROPPED"]:
self.debug(f"Received {decoded_data}, ending call")
self.call_active = False
if self.voice_active:
self.command_queue.put("end_voice")
else:
self.debug(f"Ignored unexpected text message: {decoded_data}")
except UnicodeDecodeError:
# Handle binary data (protocol messages or encrypted data)
if len(data) > 0 and data[0] == 0x20 and not client.handshake_complete: # Noise handshake message only before handshake completes
self.debug(f"Received Noise handshake message")
# Initialize responder if not already done
if not client.handshake_initiated:
# Find the other phone's public key
# This is a bit hacky but works for our 2-phone setup
manager = getattr(client, 'manager', None)
if manager:
other_phone = manager.phones[1 - self.client_id]
client.start_handshake(initiator=False,
keypair=client.keypair or manager.phones[self.client_id]['keypair'],
peer_pubkey=other_phone['public_key'])
# Pass to protocol handler
client._handle_protocol_message(data)
elif client.handshake_complete and client.noise_wrapper:
# Pass encrypted data back to client for decryption
client._handle_encrypted_data(data)
else:
# Pass other binary messages to protocol handler only if not yet complete
if not client.handshake_complete:
client._handle_protocol_message(data)
def check_handshake_timeout(self, client):
"""Check for handshake timeout."""
if self.handshake_in_progress and self.handshake_start_time:
if time.time() - self.handshake_start_time > 30:
self.debug(f"Handshake timeout after 30s")
client.state_changed.emit("CALL_END", "", self.client_id)
self.handshake_in_progress = False
self.handshake_start_time = None
def queue_voice_command(self, command):
"""Queue voice-related commands"""
if command in ["start_voice", "end_voice"]:
self.command_queue.put(command)
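A compact sketch of the message classification handle_data() above implements (illustration only; the real handler also routes messages to the client object):

CONTROL = {"RINGING", "CALL_END", "CALL_DROPPED", "IN_CALL", "HANDSHAKE", "HANDSHAKE_DONE"}

def classify(data: bytes, handshake_complete: bool) -> str:
    try:
        text = data.decode('utf-8').strip()
        return "control" if text in CONTROL else "unexpected-text"
    except UnicodeDecodeError:
        if data and data[0] == 0x20 and not handshake_complete:
            return "noise-handshake"
        if handshake_complete:
            return "encrypted-voice"
        return "protocol"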

View File

@ -0,0 +1,457 @@
import socket
import time
import select
import struct
import array
from PyQt5.QtCore import QThread, pyqtSignal
from protocol_client_state import ProtocolClientState
from session import NoiseXKSession
from noise_wrapper import NoiseXKWrapper
from dissononce.dh.keypair import KeyPair
from dissononce.dh.x25519.public import PublicKey
import sys
import os
# Add path to access voice_codec from Prototype directory
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'Prototype', 'Protocol_Alpha_0'))
from voice_codec import Codec2Wrapper, FSKModem, Codec2Mode
# ChaCha20 removed - using only Noise XK encryption
class ProtocolPhoneClient(QThread):
"""Integrated phone client with Noise XK, Codec2, 4FSK, and ChaCha20"""
data_received = pyqtSignal(bytes, int)
state_changed = pyqtSignal(str, str, int)
def __init__(self, client_id):
super().__init__()
self.host = "localhost"
self.port = 12345
self.client_id = client_id
self.sock = None
self.running = True
self.state = ProtocolClientState(client_id)
# Noise XK session
self.noise_session = None
self.noise_wrapper = None
self.handshake_complete = False
self.handshake_initiated = False
# No buffer needed with larger frame size
# Voice codec components - use higher quality mode
self.codec = Codec2Wrapper(mode=Codec2Mode.MODE_3200) # Changed from 1200 to 3200 bps for better quality
self.modem = FSKModem()
# Voice encryption handled by Noise XK
# No separate voice key needed
# Voice state
self.voice_active = False
self.voice_frame_counter = 0
# Message buffer for fragmented messages
self.recv_buffer = bytearray()
# Debug callback
self.debug_callback = None
def set_debug_callback(self, callback):
"""Set debug callback function"""
self.debug_callback = callback
self.state.debug_callback = callback
def debug(self, message):
"""Send debug message"""
if self.debug_callback:
self.debug_callback(f"[Phone{self.client_id+1}] {message}")
else:
print(f"[Phone{self.client_id+1}] {message}")
def connect_socket(self):
retries = 3
for attempt in range(retries):
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.sock.settimeout(120)
self.sock.connect((self.host, self.port))
self.debug(f"Connected to GSM simulator at {self.host}:{self.port}")
return True
except Exception as e:
self.debug(f"Connection attempt {attempt + 1} failed: {e}")
if attempt < retries - 1:
time.sleep(1)
self.sock = None
return False
def run(self):
while self.running:
if not self.sock:
if not self.connect_socket():
self.debug("Failed to connect after retries")
self.state_changed.emit("CALL_END", "", self.client_id)
break
try:
while self.running:
self.state.process_command(self)
self.state.check_handshake_timeout(self)
if self.handshake_complete and self.voice_active:
# Process voice data if active
self._process_voice_data()
# Always check for incoming data, even during handshake
if self.sock is None:
break
readable, _, _ = select.select([self.sock], [], [], 0.01)
if readable:
try:
if self.sock is None:
break
chunk = self.sock.recv(4096)
if not chunk:
self.debug("Disconnected from server")
self.state_changed.emit("CALL_END", "", self.client_id)
break
# Add to buffer
self.recv_buffer.extend(chunk)
# Process complete messages
while len(self.recv_buffer) >= 4:
# Read message length
msg_len = struct.unpack('>I', self.recv_buffer[:4])[0]
# Check if we have the complete message
if len(self.recv_buffer) >= 4 + msg_len:
# Extract message
data = bytes(self.recv_buffer[4:4+msg_len])
# Remove from buffer
self.recv_buffer = self.recv_buffer[4+msg_len:]
# Pass to state handler
self.state.handle_data(self, data)
else:
# Wait for more data
break
except socket.error as e:
self.debug(f"Socket error: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
break
self.msleep(1)
except Exception as e:
self.debug(f"Unexpected error in run loop: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
break
finally:
if self.sock:
try:
self.sock.close()
except Exception as e:
self.debug(f"Error closing socket: {e}")
self.sock = None
def _handle_encrypted_data(self, data):
"""Handle encrypted data after handshake"""
if not self.handshake_complete or not self.noise_wrapper:
self.debug(f"Cannot decrypt - handshake not complete")
return
# All data after handshake is encrypted, decrypt it first
try:
plaintext = self.noise_wrapper.decrypt(data)
# Check if it's a text message
try:
text_msg = plaintext.decode('utf-8').strip()
if text_msg == "HANDSHAKE_DONE":
self.debug(f"Received encrypted HANDSHAKE_DONE")
self.state_changed.emit("HANDSHAKE_DONE", "HANDSHAKE_DONE", self.client_id)
return
except UnicodeDecodeError:
# Not a UTF-8 control message; fall through to protocol handling below
pass
# Otherwise handle as protocol message
self._handle_protocol_message(plaintext)
except Exception as e:
# Suppress common decryption errors
pass
def _handle_protocol_message(self, plaintext):
"""Handle decrypted protocol messages"""
if len(plaintext) < 1:
return
msg_type = plaintext[0]
msg_data = plaintext[1:]
if msg_type == 0x10: # Voice start
self.debug("Received VOICE_START message")
self._handle_voice_start(msg_data)
elif msg_type == 0x11: # Voice data
self._handle_voice_data(msg_data)
elif msg_type == 0x12: # Voice end
self.debug("Received VOICE_END message")
self._handle_voice_end(msg_data)
elif msg_type == 0x20: # Noise handshake
self.debug("Received NOISE_HS message")
self._handle_noise_handshake(msg_data)
else:
self.debug(f"Received unknown protocol message type: 0x{msg_type:02x}")
# Don't emit control messages to data_received - that's only for audio
# Control messages should be handled via state_changed signal
def _handle_voice_start(self, data):
"""Handle voice session start"""
self.debug("Voice session started by peer")
self.voice_active = True
self.voice_frame_counter = 0
self.state_changed.emit("VOICE_START", "", self.client_id)
def _handle_voice_data(self, data):
"""Handle voice frame (already decrypted by Noise)"""
if len(data) < 4:
return
try:
# Data is float array packed as bytes
# Unpack the float array
num_floats = len(data) // 4
modulated_signal = struct.unpack(f'{num_floats}f', data[:num_floats * 4])  # trim any trailing partial float
# Demodulate FSK
demodulated_data, confidence = self.modem.demodulate(modulated_signal)
if confidence > 0.5: # Only decode if confidence is good
# Create Codec2Frame from demodulated data
from voice_codec import Codec2Frame, Codec2Mode
frame = Codec2Frame(
mode=Codec2Mode.MODE_3200, # Match the encoder mode
bits=demodulated_data,
timestamp=time.time(),
frame_number=self.voice_frame_counter
)
# Decode with Codec2
pcm_samples = self.codec.decode(frame)
if self.voice_frame_counter == 0:
self.debug(f"First voice frame demodulated with confidence {confidence:.2f}")
# Send PCM to UI for playback
if pcm_samples is not None and len(pcm_samples) > 0:
# Only log details for first frame and every 25th frame
if self.voice_frame_counter == 0 or self.voice_frame_counter % 25 == 0:
self.debug(f"Decoded PCM samples: type={type(pcm_samples)}, len={len(pcm_samples)}")
# Convert to bytes if needed
if hasattr(pcm_samples, 'tobytes'):
pcm_bytes = pcm_samples.tobytes()
elif isinstance(pcm_samples, (list, array.array)):
# Convert list/array to bytes ('array' is already imported at module level)
if isinstance(pcm_samples, list):
pcm_array = array.array('h', pcm_samples)
pcm_bytes = pcm_array.tobytes()
else:
pcm_bytes = pcm_samples.tobytes()
else:
pcm_bytes = bytes(pcm_samples)
if self.voice_frame_counter == 0:
self.debug(f"Emitting first PCM frame: {len(pcm_bytes)} bytes")
self.data_received.emit(pcm_bytes, self.client_id)
self.voice_frame_counter += 1
# Log frame reception periodically
if self.voice_frame_counter == 1 or self.voice_frame_counter % 25 == 0:
self.debug(f"Received voice data frame #{self.voice_frame_counter}")
else:
self.debug(f"Codec decode returned None or empty")
else:
if self.voice_frame_counter % 10 == 0:
self.debug(f"Low confidence demodulation: {confidence:.2f}")
except Exception as e:
self.debug(f"Voice decode error: {e}")
def _handle_voice_end(self, data):
"""Handle voice session end"""
self.debug("Voice session ended by peer")
self.voice_active = False
self.state_changed.emit("VOICE_END", "", self.client_id)
def _handle_noise_handshake(self, data):
"""Handle Noise handshake message"""
if not self.noise_wrapper:
self.debug("Received handshake message but no wrapper initialized")
return
try:
# Process the handshake message
self.noise_wrapper.process_handshake_message(data)
# Check if we need to send a response
response = self.noise_wrapper.get_next_handshake_message()
if response:
self.send(b'\x20' + response)
# Check if handshake is complete
if self.noise_wrapper.handshake_complete and not self.handshake_complete:
self.debug("Noise wrapper handshake complete, calling complete_handshake()")
self.complete_handshake()
except Exception as e:
self.debug(f"Handshake processing error: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
def _process_voice_data(self):
"""Process outgoing voice data"""
# This would be called when we have voice input to send
# For now, this is a placeholder
pass
def send_voice_frame(self, pcm_samples):
"""Send a voice frame through the protocol"""
if not self.handshake_complete:
self.debug("Cannot send voice - handshake not complete")
return
if not self.voice_active:
self.debug("Cannot send voice - voice session not active")
return
try:
# Encode with Codec2
codec_frame = self.codec.encode(pcm_samples)
if not codec_frame:
return
if self.voice_frame_counter % 25 == 0: # Log every 25 frames (1 second)
self.debug(f"Encoding voice frame #{self.voice_frame_counter}: {len(pcm_samples)} bytes PCM → {len(codec_frame.bits)} bytes compressed")
# Modulate with FSK
modulated_data = self.modem.modulate(codec_frame.bits)
# Convert modulated float array to bytes
modulated_bytes = struct.pack(f'{len(modulated_data)}f', *modulated_data)
if self.voice_frame_counter % 25 == 0:
self.debug(f"Voice frame size: {len(modulated_bytes)} bytes")
# Build voice data message (no ChaCha20, will be encrypted by Noise)
msg = bytes([0x11]) + modulated_bytes
# Send through Noise encrypted channel
self.send(msg)
self.voice_frame_counter += 1
except Exception as e:
self.debug(f"Voice encode error: {e}")
def send(self, message):
"""Send data through Noise encrypted channel with proper framing"""
if self.sock and self.running:
try:
# Handshake messages (0x20) bypass Noise encryption
if isinstance(message, bytes) and len(message) > 0 and message[0] == 0x20:
# Add length prefix for framing
framed = struct.pack('>I', len(message)) + message
self.sock.send(framed)
return
if self.handshake_complete and self.noise_wrapper:
# Encrypt everything with Noise after handshake
# Convert string to bytes if needed
if isinstance(message, str):
message = message.encode('utf-8')
encrypted = self.noise_wrapper.encrypt(message)
# Add length prefix for framing
framed = struct.pack('>I', len(encrypted)) + encrypted
self.sock.send(framed)
else:
# During handshake, send raw with framing
if isinstance(message, str):
data = message.encode('utf-8')
framed = struct.pack('>I', len(data)) + data
self.sock.send(framed)
self.debug(f"Sent control message: {message}")
else:
framed = struct.pack('>I', len(message)) + message
self.sock.send(framed)
except socket.error as e:
self.debug(f"Send error: {e}")
self.state_changed.emit("CALL_END", "", self.client_id)
def stop(self):
self.running = False
self.voice_active = False
if self.sock:
try:
self.sock.close()
except Exception as e:
self.debug(f"Error closing socket in stop: {e}")
self.sock = None
self.quit()
self.wait(1000)
def start_handshake(self, initiator, keypair, peer_pubkey):
"""Start Noise XK handshake"""
self.debug(f"Starting Noise XK handshake as {'initiator' if initiator else 'responder'}")
self.debug(f"Our public key: {keypair.public.data.hex()[:32]}...")
self.debug(f"Peer public key: {peer_pubkey.data.hex()[:32]}...")
# Create noise wrapper
self.noise_wrapper = NoiseXKWrapper(keypair, peer_pubkey, self.debug)
self.noise_wrapper.start_handshake(initiator)
self.handshake_initiated = True
# Send first handshake message if initiator
if initiator:
msg = self.noise_wrapper.get_next_handshake_message()
if msg:
# Send as NOISE_HS message type
self.send(b'\x20' + msg) # 0x20 = Noise handshake message
def complete_handshake(self):
"""Called when Noise handshake completes"""
self.handshake_complete = True
self.debug("Noise XK handshake complete!")
self.debug("Secure channel established")
# Send HANDSHAKE_DONE message
self.send("HANDSHAKE_DONE")
self.state_changed.emit("HANDSHAKE_COMPLETE", "", self.client_id)
def start_voice_session(self):
"""Start a voice session"""
if not self.handshake_complete:
self.debug("Cannot start voice - handshake not complete")
return
self.voice_active = True
self.voice_frame_counter = 0
# Send voice start message
msg = bytes([0x10]) # Voice start message type
self.send(msg)
self.debug("Voice session started")
self.state_changed.emit("VOICE_START", "", self.client_id)
def end_voice_session(self):
"""End a voice session"""
if not self.voice_active:
return
self.voice_active = False
# Send voice end message
msg = bytes([0x12]) # Voice end message type
self.send(msg)
self.debug("Voice session ended")
self.state_changed.emit("VOICE_END", "", self.client_id)

View File

@ -10,8 +10,8 @@ from dissononce.dh.keypair import KeyPair
from dissononce.dh.x25519.public import PublicKey
from dissononce.hash.sha256 import SHA256Hash
# Configure root logger for debug output
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
# Configure logging - disabled by default to avoid noise
# logging.basicConfig(level=logging.DEBUG, format="%(message)s")
class NoiseXKSession:
@staticmethod
@ -46,7 +46,7 @@ class NoiseXKSession:
so that each side reads or writes in the correct message order.
On completion, self._send_cs and self._recv_cs hold the two CipherStates.
"""
logging.debug(f"[handshake] start (initiator={initiator})")
# logging.debug(f"[handshake] start (initiator={initiator})")
# initialize with our KeyPair and their PublicKey
if initiator:
# initiator knows peers static out-of-band
@ -58,7 +58,7 @@ class NoiseXKSession:
rs=self.peer_pubkey
)
else:
logging.debug("[handshake] responder initializing without rs")
# logging.debug("[handshake] responder initializing without rs")
# responder must NOT supply rs here
self._hs.initialize(
XKHandshakePattern(),
@ -72,34 +72,34 @@ class NoiseXKSession:
# 1) -> e
buf1 = bytearray()
cs_pair = self._hs.write_message(b'', buf1)
logging.debug(f"[-> e] {buf1.hex()}")
# logging.debug(f"[-> e] {buf1.hex()}")
self._send_all(sock, buf1)
# 2) <- e, es, s, ss
msg2 = self._recv_all(sock)
logging.debug(f"[<- msg2] {msg2.hex()}")
# logging.debug(f"[<- msg2] {msg2.hex()}")
self._hs.read_message(msg2, bytearray())
# 3) -> se (final)
buf3 = bytearray()
cs_pair = self._hs.write_message(b'', buf3)
logging.debug(f"[-> se] {buf3.hex()}")
# logging.debug(f"[-> se] {buf3.hex()}")
self._send_all(sock, buf3)
else:
# 1) <- e
msg1 = self._recv_all(sock)
logging.debug(f"[<- e] {msg1.hex()}")
# logging.debug(f"[<- e] {msg1.hex()}")
self._hs.read_message(msg1, bytearray())
# 2) -> e, es, s, ss
buf2 = bytearray()
cs_pair = self._hs.write_message(b'', buf2)
logging.debug(f"[-> msg2] {buf2.hex()}")
# logging.debug(f"[-> msg2] {buf2.hex()}")
self._send_all(sock, buf2)
# 3) <- se (final)
msg3 = self._recv_all(sock)
logging.debug(f"[<- se] {msg3.hex()}")
# logging.debug(f"[<- se] {msg3.hex()}")
cs_pair = self._hs.read_message(msg3, bytearray())
# on the final step, we must get exactly two CipherStates
@ -168,9 +168,9 @@ class NoiseXKSession:
# Read 2-byte length prefix, then the payload
hdr = self._read_exact(sock, 2)
length = int.from_bytes(hdr, 'big')
logging.debug(f"[RECV] length={length} ({hdr.hex()})")
# logging.debug(f"[RECV] length={length} ({hdr.hex()})")
data = self._read_exact(sock, length)
logging.debug(f"[RECV] data={data.hex()}")
# logging.debug(f"[RECV] data={data.hex()}")
return data
@staticmethod

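Note that NoiseXKSession uses its own, smaller framing: _recv_all() above reads a 2-byte big-endian length prefix, separate from the 4-byte framing used on the simulator socket. A matching sender, as a minimal sketch under that assumption:
def send_all(sock, payload: bytes) -> None:
    # 2-byte big-endian length prefix, mirroring _recv_all() above
    if len(payload) > 0xFFFF:
        raise ValueError("payload too large for a 2-byte length prefix")
    sock.sendall(len(payload).to_bytes(2, 'big') + payload)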
View File

@ -1,4 +1,5 @@
import random
import struct
from PyQt5.QtWidgets import QWidget
from PyQt5.QtCore import QTimer, QSize, QPointF
from PyQt5.QtGui import QPainter, QColor, QPen, QLinearGradient, QBrush
@ -7,21 +8,44 @@ class WaveformWidget(QWidget):
def __init__(self, parent=None, dynamic=False):
super().__init__(parent)
self.dynamic = dynamic
self.setMinimumSize(200, 80)
self.setMaximumHeight(100)
self.waveform_data = [random.randint(10, 90) for _ in range(50)]
self.setMinimumSize(200, 60)
self.setMaximumHeight(80)
# Start with flat line instead of random data
self.waveform_data = [50 for _ in range(50)]
if self.dynamic:
self.timer = QTimer(self)
self.timer.timeout.connect(self.update_waveform)
self.timer.start(100)
def update_waveform(self):
self.waveform_data = self.waveform_data[1:] + [random.randint(10, 90)]
self.update()
# Only update with random data if dynamic mode is enabled
if self.dynamic:
self.waveform_data = self.waveform_data[1:] + [random.randint(10, 90)]
self.update()
def set_data(self, data):
amplitude = sum(byte for byte in data) % 90 + 10
self.waveform_data = self.waveform_data[1:] + [amplitude]
# Convert audio data to visual amplitude
if isinstance(data, bytes) and len(data) >= 2:
# Extract PCM samples (16-bit signed)
num_samples = min(len(data) // 2, 20) # Take up to 20 samples
samples = []
for i in range(0, num_samples * 2, 2):
if i + 1 < len(data):
sample = struct.unpack('h', data[i:i+2])[0]
# Normalize to 0-100 range
amplitude = abs(sample) / 327.68 # 32768/100
samples.append(min(95, max(5, amplitude)))
if samples:
# Add new samples and maintain fixed size
self.waveform_data.extend(samples)
# Keep last 50 samples
self.waveform_data = self.waveform_data[-50:]
else:
# Fallback for non-audio data
amplitude = sum(byte for byte in data[:20]) % 90 + 10
self.waveform_data = self.waveform_data[1:] + [amplitude]
self.update()
def paintEvent(self, event):

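The new set_data() maps 16-bit signed PCM to a 5-95 display range by dividing the absolute sample value by 327.68 (i.e. 32768/100) and clamping. A quick worked check of that scaling, using the same 'h' sample format as the widget:
import struct

def pcm16_to_amplitudes(data: bytes, max_samples: int = 20):
    """Convert 16-bit PCM bytes to 5-95 display amplitudes, as set_data() does."""
    amplitudes = []
    for i in range(0, min(len(data), max_samples * 2), 2):
        (sample,) = struct.unpack('h', data[i:i + 2])
        amplitudes.append(min(95, max(5, abs(sample) / 327.68)))
    return amplitudes

# A half-scale sample (16384) maps to 50.0; a full-scale sample (32767) clamps to 95.
print(pcm16_to_amplitudes(struct.pack('hh', 32767, 16384)))  # [95, 50.0]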
View File

@ -1,13 +0,0 @@
simulator/
├── gsm_simulator.py # gsm_simulator
├── launch_gsm_simulator.sh # used to start Docker and the simulator; run in a terminal
2 clients connect to the gsm_simulator and simulate a call using the Noise protocol
UI/
├── main.py # UI setup and event handling
├── phone_manager.py # Phone state, client init, audio logic
├── phone_client.py # Socket communication and threading
├── client_state.py # Client state and command processing
├── session.py # Noise XK crypto session
├── waveform_widget.py # Waveform UI component
├── phone_state.py # State constants

View File

@ -0,0 +1,58 @@
#!/bin/bash
# Install audio dependencies for DryBox
echo "Installing audio dependencies for DryBox..."
echo
# Detect OS
if [ -f /etc/os-release ]; then
. /etc/os-release
OS=$ID
VER=$VERSION_ID
else
echo "Cannot detect OS. Please install manually."
exit 1
fi
case $OS in
fedora)
echo "Detected Fedora $VER"
echo "Installing python3-devel and portaudio-devel..."
sudo dnf install -y python3-devel portaudio-devel
;;
ubuntu|debian)
echo "Detected $OS $VER"
echo "Installing python3-dev and portaudio19-dev..."
sudo apt-get update
sudo apt-get install -y python3-dev portaudio19-dev
;;
*)
echo "Unsupported OS: $OS"
echo "Please install manually:"
echo " - Python development headers"
echo " - PortAudio development libraries"
exit 1
;;
esac
if [ $? -eq 0 ]; then
echo
echo "System dependencies installed successfully!"
echo "Now installing PyAudio..."
pip install pyaudio
if [ $? -eq 0 ]; then
echo
echo "✅ Audio dependencies installed successfully!"
echo "You can now use real-time audio playback in DryBox."
else
echo
echo "❌ Failed to install PyAudio"
echo "Try: pip install --user pyaudio"
fi
else
echo
echo "❌ Failed to install system dependencies"
fi

View File

@ -0,0 +1,22 @@
# Core dependencies for DryBox integrated protocol
# Noise Protocol Framework
dissononce>=0.34.3
# Cryptography
cryptography>=41.0.0
# Qt GUI
PyQt5>=5.15.0
# Numerical computing (for signal processing)
numpy>=1.24.0
# Audio processing (for real audio I/O)
pyaudio>=0.2.11
# Wave file handling (included in standard library)
# wave
# For future integration with real Codec2
# pycodec2>=1.0.0
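Assuming this list is saved as requirements.txt (the filename is not visible in this view), the environment can be verified after a pip install -r requirements.txt with a short import test; pyaudio is left out here because it also needs the PortAudio system package installed by the script above:
# Hypothetical smoke test after `pip install -r requirements.txt`
import dissononce
import cryptography
import numpy
from PyQt5 import QtCore

print("Core dependencies import cleanly; Qt version:", QtCore.QT_VERSION_STR)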

View File

@ -1,14 +0,0 @@
# Use official Python image
FROM python:3.9-slim
# Set working directory
WORKDIR /app
# Copy the simulator script
COPY gsm_simulator.py .
# Expose the port
EXPOSE 12345
# Run the simulator
CMD ["python", "gsm_simulator.py"]

View File

@ -1,10 +1,11 @@
import socket
import threading
import time
import struct
HOST = "0.0.0.0"
PORT = 12345
FRAME_SIZE = 1000
FRAME_SIZE = 10000 # Increased to avoid fragmenting voice frames
FRAME_DELAY = 0.02
clients = []
@ -12,25 +13,49 @@ clients_lock = threading.Lock()
def handle_client(client_sock, client_id):
print(f"Starting handle_client for Client {client_id}")
recv_buffer = bytearray()
try:
while True:
other_client = None
with clients_lock:
if len(clients) == 2 and client_id < len(clients):
other_client = clients[1 - client_id]
print(f"Client {client_id} waiting for data, other_client exists: {other_client is not None}")
try:
data = client_sock.recv(1024)
if not data:
chunk = client_sock.recv(4096)
if not chunk:
print(f"Client {client_id} disconnected or no data received")
break
if other_client:
for i in range(0, len(data), FRAME_SIZE):
frame = data[i:i + FRAME_SIZE]
other_client.send(frame)
time.sleep(FRAME_DELAY)
print(f"Forwarded {len(data)} bytes from Client {client_id} to Client {1 - client_id}")
# Add to buffer
recv_buffer.extend(chunk)
# Process complete messages
while len(recv_buffer) >= 4:
# Read message length
msg_len = struct.unpack('>I', recv_buffer[:4])[0]
# Check if we have the complete message
if len(recv_buffer) >= 4 + msg_len:
# Extract complete message (including length prefix)
complete_msg = bytes(recv_buffer[:4+msg_len])
# Remove from buffer
recv_buffer = recv_buffer[4+msg_len:]
# Forward complete message to other client
if other_client:
try:
other_client.send(complete_msg)
print(f"Forwarded {len(complete_msg)} bytes from Client {client_id} to Client {1 - client_id}")
except Exception as e:
print(f"Error forwarding from Client {client_id}: {e}")
else:
print(f"No other client to forward to from Client {client_id}")
else:
# Wait for more data
break
except socket.error as e:
print(f"Socket error with Client {client_id}: {e}")
break
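With the relay now reassembling complete length-prefixed messages before forwarding, a quick two-client smoke test is possible (a sketch assuming the simulator is already listening on localhost:12345 and that exactly two clients connect):
import socket
import struct
import time

def frame(payload: bytes) -> bytes:
    # Same 4-byte big-endian length prefix the clients use
    return struct.pack('>I', len(payload)) + payload

a = socket.create_connection(("localhost", 12345))  # registers as Client 0
b = socket.create_connection(("localhost", 12345))  # registers as Client 1
time.sleep(0.5)  # give the simulator a moment to register both clients

a.sendall(frame(b"RINGING"))      # Client 0 -> simulator
header = b.recv(4)                # Client 1 receives the forwarded frame
length = struct.unpack('>I', header)[0]
print(b.recv(length))             # expected: b'RINGING'
a.close()
b.close()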

View File

@ -1,68 +0,0 @@
#!/bin/bash
# Script to launch the GSM Simulator in Docker
# Variables
IMAGE_NAME="gsm-simulator"
CONTAINER_NAME="gsm-sim"
PORT="12345"
LOG_FILE="gsm_simulator.log"
# Check if Docker is installed
if ! command -v docker &> /dev/null; then
echo "Error: Docker is not installed. Please install Docker and try again."
exit 1
fi
# Check if gsm_simulator.py exists
if [ ! -f "gsm_simulator.py" ]; then
echo "Error: gsm_simulator.py not found in the current directory."
echo "Please ensure gsm_simulator.py is present and try again."
exit 1
fi
# Create Dockerfile if it doesn't exist
if [ ! -f "Dockerfile" ]; then
echo "Creating Dockerfile..."
cat <<EOF > Dockerfile
FROM python:3.9-slim
WORKDIR /app
COPY gsm_simulator.py .
EXPOSE 12345
CMD ["python", "gsm_simulator.py"]
EOF
fi
# Ensure log file is writable
touch $LOG_FILE
chmod 666 $LOG_FILE
# Build the Docker image
echo "Building Docker image: $IMAGE_NAME..."
docker build -t $IMAGE_NAME .
# Check if the build was successful
if [ $? -ne 0 ]; then
echo "Error: Failed to build Docker image."
exit 1
fi
# Stop and remove any existing container
if [ "$(docker ps -q -f name=$CONTAINER_NAME)" ]; then
echo "Stopping existing container: $CONTAINER_NAME..."
docker stop $CONTAINER_NAME
fi
if [ "$(docker ps -aq -f name=$CONTAINER_NAME)" ]; then
echo "Removing existing container: $CONTAINER_NAME..."
docker rm $CONTAINER_NAME
fi
# Clean up dangling images
docker image prune -f
# Run the Docker container interactively
echo "Launching GSM Simulator in Docker container: $CONTAINER_NAME..."
docker run -it --rm -p $PORT:$PORT --name $CONTAINER_NAME $IMAGE_NAME | tee $LOG_FILE
# Note: Script will block here until container exits
echo "GSM Simulator stopped. Logs saved to $LOG_FILE."

View File

@ -1,24 +0,0 @@
#external_caller.py
import socket
import time
def connect():
caller_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
caller_socket.connect(('localhost', 12345))
caller_socket.send("CALLER".encode())
print("Connected to GSM simulator as CALLER")
time.sleep(2) # Wait 2 seconds for receiver to connect
for i in range(5):
message = f"Audio packet {i + 1}"
caller_socket.send(message.encode())
print(f"Sent: {message}")
time.sleep(1)
caller_socket.send("CALL_END".encode())
print("Call ended.")
caller_socket.close()
if __name__ == "__main__":
connect()

View File

@ -1,37 +0,0 @@
#external_receiver.py
import socket
def connect():
receiver_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
receiver_socket.settimeout(15) # Increase timeout to 15 seconds
receiver_socket.connect(('localhost', 12345))
receiver_socket.send("RECEIVER".encode())
print("Connected to GSM simulator as RECEIVER")
while True:
try:
data = receiver_socket.recv(1024).decode().strip()
if not data:
print("No data received. Connection closed.")
break
if data == "RINGING":
print("Incoming call... ringing")
elif data == "CALL_END":
print("Call ended by caller.")
break
elif data == "CALL_DROPPED":
print("Call dropped by network.")
break
else:
print(f"Received: {data}")
except socket.timeout:
print("Timed out waiting for data.")
break
except Exception as e:
print(f"Receiver error: {e}")
break
receiver_socket.close()
if __name__ == "__main__":
connect()

View File

@ -1,86 +0,0 @@
import socket
import os
import time
import subprocess
# Configuration
HOST = "localhost"
PORT = 12345
INPUT_FILE = "wav/input.wav"
OUTPUT_FILE = "wav/received.wav"
def encrypt_data(data):
return data # Replace with your encryption protocol
def decrypt_data(data):
return data # Replace with your decryption protocol
def run_protocol(send_mode=True):
"""Connect to the simulator and send/receive data."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((HOST, PORT))
print(f"Connected to simulator at {HOST}:{PORT}")
if send_mode:
# Sender mode: Encode audio with toast
os.system(f"toast -p -l {INPUT_FILE}") # Creates input.wav.gsm
input_gsm_file = f"{INPUT_FILE}.gsm"
if not os.path.exists(input_gsm_file):
print(f"Error: {input_gsm_file} not created")
sock.close()
return
with open(input_gsm_file, "rb") as f:
voice_data = f.read()
encrypted_data = encrypt_data(voice_data)
sock.send(encrypted_data)
print(f"Sent {len(encrypted_data)} bytes")
os.remove(input_gsm_file) # Clean up
else:
# Receiver mode: Wait for and receive data
print("Waiting for data from sender...")
received_data = b""
sock.settimeout(5.0)
try:
while True:
print("Calling recv()...")
data = sock.recv(1024)
print(f"Received {len(data)} bytes")
if not data:
print("Connection closed by sender or simulator")
break
received_data += data
except socket.timeout:
print("Timed out waiting for data")
if received_data:
with open("received.gsm", "wb") as f:
f.write(decrypt_data(received_data))
print(f"Wrote {len(received_data)} bytes to received.gsm")
# Decode with untoast, then convert to WAV with sox
result = subprocess.run(["untoast", "received.gsm"], capture_output=True, text=True)
print(f"untoast return code: {result.returncode}")
print(f"untoast stderr: {result.stderr}")
if result.returncode == 0:
if os.path.exists("received"):
# Convert raw PCM to WAV (8 kHz, mono, 16-bit)
subprocess.run(["sox", "-t", "raw", "-r", "8000", "-e", "signed", "-b", "16", "-c", "1", "received",
OUTPUT_FILE])
os.remove("received")
print(f"Received and saved {len(received_data)} bytes to {OUTPUT_FILE}")
else:
print("Error: 'received' file not created by untoast")
else:
print(f"untoast failed: {result.stderr}")
else:
print("No data received from simulator")
sock.close()
if __name__ == "__main__":
mode = input("Enter 'send' to send data or 'receive' to receive data: ").strip().lower()
run_protocol(send_mode=(mode == "send"))

Binary file not shown.

View File

@ -0,0 +1,566 @@
<mxfile host="app.diagrams.net" agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36" version="26.1.3" pages="2">
<diagram id="C5RBs43oDa-KdzZeNtuy" name="Logique">
<mxGraphModel dx="735" dy="407" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="827" pageHeight="1169" math="0" shadow="0">
<root>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-0" />
<mxCell id="WIyWlLk6GJQsqaUBKTNV-1" parent="WIyWlLk6GJQsqaUBKTNV-0" />
<mxCell id="WIyWlLk6GJQsqaUBKTNV-2" value="" style="rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;edgeStyle=orthogonalEdgeStyle;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-3" target="WIyWlLk6GJQsqaUBKTNV-6" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-3" value="Alice appelle Bob" style="rounded=1;whiteSpace=wrap;html=1;fontSize=12;glass=0;strokeWidth=3;shadow=0;strokeColor=light-dark(#000000,#370FFF);" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="300" y="90" width="120" height="40" as="geometry" />
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-4" value="Yes" style="rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;edgeStyle=orthogonalEdgeStyle;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-0" target="WIyWlLk6GJQsqaUBKTNV-10" edge="1">
<mxGeometry y="20" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-5" value="No" style="edgeStyle=orthogonalEdgeStyle;rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-6" target="WIyWlLk6GJQsqaUBKTNV-7" edge="1">
<mxGeometry y="10" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-6" value="Bob répond ?" style="rhombus;whiteSpace=wrap;html=1;shadow=0;fontFamily=Helvetica;fontSize=12;align=center;strokeWidth=1;spacing=6;spacingTop=-4;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="310" y="180" width="100" height="80" as="geometry" />
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-7" value="Rien ne se passe" style="rounded=1;whiteSpace=wrap;html=1;fontSize=12;glass=0;strokeWidth=1;shadow=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="460" y="200" width="120" height="40" as="geometry" />
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-8" value="Négativement" style="rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;edgeStyle=orthogonalEdgeStyle;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-10" target="WIyWlLk6GJQsqaUBKTNV-11" edge="1">
<mxGeometry y="40" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-9" value="Positivement" style="edgeStyle=orthogonalEdgeStyle;rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-10" target="WIyWlLk6GJQsqaUBKTNV-12" edge="1">
<mxGeometry y="10" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-10" value="Bob ping..." style="whiteSpace=wrap;html=1;shadow=0;fontFamily=Helvetica;fontSize=12;align=center;strokeWidth=1;spacing=6;spacingTop=-4;shape=hexagon;perimeter=hexagonPerimeter2;fixedSize=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="310" y="390" width="100" height="80" as="geometry" />
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-11" value="Protocole échoué&lt;div&gt;-&lt;/div&gt;&lt;div&gt;Passage en clair&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fontSize=12;glass=0;strokeWidth=3;shadow=0;strokeColor=light-dark(#000000,#FF1414);" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="300" y="520" width="120" height="50" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-7" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-12" target="FXGPDhTRSO2FZSW48CnP-8" edge="1">
<mxGeometry relative="1" as="geometry">
<mxPoint x="599.9999999999998" y="500" as="targetPoint" />
</mxGeometry>
</mxCell>
<mxCell id="WIyWlLk6GJQsqaUBKTNV-12" value="Alice envoi son&lt;span style=&quot;background-color: transparent; color: light-dark(rgb(0, 0, 0), rgb(255, 255, 255));&quot;&gt;&amp;nbsp;handshake a Bob&lt;/span&gt;" style="rounded=1;whiteSpace=wrap;html=1;fontSize=12;glass=0;strokeWidth=1;shadow=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="540" y="410" width="120" height="40" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-1" value="" style="rounded=0;html=1;jettySize=auto;orthogonalLoop=1;fontSize=11;endArrow=block;endFill=0;endSize=8;strokeWidth=1;shadow=0;labelBackgroundColor=none;edgeStyle=orthogonalEdgeStyle;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-6" target="FXGPDhTRSO2FZSW48CnP-0" edge="1">
<mxGeometry y="20" relative="1" as="geometry">
<mxPoint as="offset" />
<mxPoint x="360" y="260" as="sourcePoint" />
<mxPoint x="360" y="390" as="targetPoint" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-0" value="Le dialer d&#39;Alice envoi un PING a Bob" style="rounded=1;whiteSpace=wrap;html=1;fontSize=12;glass=0;strokeWidth=1;shadow=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="300" y="300" width="120" height="40" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-2" value="" style="endArrow=block;html=1;rounded=0;exitX=0;exitY=0.5;exitDx=0;exitDy=0;endFill=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;edgeStyle=orthogonalEdgeStyle;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="WIyWlLk6GJQsqaUBKTNV-10" target="FXGPDhTRSO2FZSW48CnP-4" edge="1">
<mxGeometry width="50" height="50" relative="1" as="geometry">
<mxPoint x="480" y="450" as="sourcePoint" />
<mxPoint x="190" y="430" as="targetPoint" />
<Array as="points">
<mxPoint x="280" y="430" />
<mxPoint x="280" y="460" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-3" value="Ne ping pas" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-2" vertex="1" connectable="0">
<mxGeometry x="-0.2695" relative="1" as="geometry">
<mxPoint x="-16" y="-10" as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-4" target="FXGPDhTRSO2FZSW48CnP-0" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="130" y="320" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-6" value="Reping" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-5" vertex="1" connectable="0">
<mxGeometry x="0.0817" relative="1" as="geometry">
<mxPoint x="23" y="-10" as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-4" value="Attendre 1s ? 0.5s ?" style="rounded=1;whiteSpace=wrap;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="70" y="440" width="120" height="40" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-12" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-8" target="FXGPDhTRSO2FZSW48CnP-11" edge="1">
<mxGeometry relative="1" as="geometry">
<mxPoint x="530.0380952380951" y="585.0038095238094" as="sourcePoint" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-13" value="Non" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-12" vertex="1" connectable="0">
<mxGeometry x="-0.4964" y="-1" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-43" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;endArrow=block;endFill=0;strokeColor=light-dark(#000000,#FF0000);" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-8" target="FXGPDhTRSO2FZSW48CnP-27" edge="1">
<mxGeometry relative="1" as="geometry">
<mxPoint x="600" y="800" as="targetPoint" />
<Array as="points">
<mxPoint x="600" y="660" />
<mxPoint x="570" y="660" />
<mxPoint x="570" y="700" />
<mxPoint x="210" y="700" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-44" value="Oui" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-43" vertex="1" connectable="0">
<mxGeometry x="-0.8049" y="1" relative="1" as="geometry">
<mxPoint x="8" y="-25" as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-8" value="Bob reconnait la clé publique d&#39;Alice ?" style="rhombus;whiteSpace=wrap;html=1;fontSize=10;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="540" y="545" width="120" height="75" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-16" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;endArrow=block;endFill=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-11" target="WIyWlLk6GJQsqaUBKTNV-11" edge="1">
<mxGeometry relative="1" as="geometry">
<mxPoint x="360" y="600" as="targetPoint" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-17" value="Non" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-16" vertex="1" connectable="0">
<mxGeometry x="-0.1233" y="-2" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-20" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-11" target="FXGPDhTRSO2FZSW48CnP-19" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-21" value="Oui" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-20" vertex="1" connectable="0">
<mxGeometry x="-0.275" y="1" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-11" value="Bob accepte la clé d&#39;Alice?" style="rhombus;whiteSpace=wrap;html=1;fontSize=10;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="354" y="620" width="120" height="75" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-23" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-19" target="FXGPDhTRSO2FZSW48CnP-22" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-19" value="Bob envoi sa clé publique en handshake" style="whiteSpace=wrap;html=1;fontSize=10;rounded=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="160" y="636.25" width="120" height="42.5" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-24" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-22" target="WIyWlLk6GJQsqaUBKTNV-11" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="70" y="545" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-25" value="Non" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-24" vertex="1" connectable="0">
<mxGeometry x="-0.7543" y="3" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-28" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-22" target="FXGPDhTRSO2FZSW48CnP-27" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="70" y="750" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-22" value="Alice accepte la clé publique de Bob&lt;span style=&quot;background-color: transparent; color: light-dark(rgb(0, 0, 0), rgb(255, 255, 255));&quot;&gt;&amp;nbsp;?&lt;/span&gt;" style="rhombus;whiteSpace=wrap;html=1;fontSize=10;rounded=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="30" y="617.5" width="80" height="80" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-47" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-26" target="FXGPDhTRSO2FZSW48CnP-46" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-26" value="Alice et Bob sont d&#39;accord sur la clé symmétrique a utiliser" style="rounded=0;whiteSpace=wrap;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="340" y="820" width="120" height="60" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-27" target="FXGPDhTRSO2FZSW48CnP-30" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-45" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;strokeColor=light-dark(#000000,#FF1616);endArrow=block;endFill=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-27" target="FXGPDhTRSO2FZSW48CnP-26" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="210" y="850" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-27" value="Alice et Bob calculent le secret partagé de leur côté" style="whiteSpace=wrap;html=1;fontSize=10;rounded=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="150" y="720" width="120" height="50" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-33" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=1;entryDx=0;entryDy=0;jumpStyle=sharp;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-29" target="FXGPDhTRSO2FZSW48CnP-34" edge="1">
<mxGeometry relative="1" as="geometry">
<mxPoint x="640" y="680" as="targetPoint" />
<Array as="points">
<mxPoint x="700" y="745" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-36" value="Non" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-33" vertex="1" connectable="0">
<mxGeometry x="-0.1536" y="1" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-41" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-29" target="FXGPDhTRSO2FZSW48CnP-26" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="525" y="798" />
<mxPoint x="510" y="798" />
<mxPoint x="510" y="850" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-42" value="Oui" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-41" vertex="1" connectable="0">
<mxGeometry x="-0.7774" y="1" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-29" value="Ils sont d&#39;accord ?" style="rhombus;whiteSpace=wrap;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="460" y="715" width="130" height="60" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-30" target="FXGPDhTRSO2FZSW48CnP-29" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-30" value="Alice et Bob lisent à haute voix la phrase de sécurité" style="whiteSpace=wrap;html=1;fontSize=10;rounded=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="300" y="720" width="120" height="50" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-34" target="WIyWlLk6GJQsqaUBKTNV-12" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-38" value="Oui" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-37" vertex="1" connectable="0">
<mxGeometry x="-0.3086" y="-2" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-39" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;jumpStyle=sharp;jumpSize=8;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-34" target="WIyWlLk6GJQsqaUBKTNV-11" edge="1">
<mxGeometry relative="1" as="geometry">
<Array as="points">
<mxPoint x="530" y="690" />
<mxPoint x="530" y="545" />
</Array>
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-40" value="Non" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FXGPDhTRSO2FZSW48CnP-39" vertex="1" connectable="0">
<mxGeometry x="-0.7617" relative="1" as="geometry">
<mxPoint as="offset" />
</mxGeometry>
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-34" value="Ré-essayer ?" style="rhombus;whiteSpace=wrap;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="660" y="650" width="80" height="80" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-49" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" source="FXGPDhTRSO2FZSW48CnP-46" target="FXGPDhTRSO2FZSW48CnP-48" edge="1">
<mxGeometry relative="1" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-46" value="Alice et Bob utilisent la clé symmétrique pour chiffrer leurs transmissions" style="whiteSpace=wrap;html=1;rounded=0;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="340" y="920" width="120" height="60" as="geometry" />
</mxCell>
<mxCell id="FXGPDhTRSO2FZSW48CnP-48" value="" style="rhombus;whiteSpace=wrap;html=1;" parent="WIyWlLk6GJQsqaUBKTNV-1" vertex="1">
<mxGeometry x="360" y="1020" width="80" height="80" as="geometry" />
</mxCell>
</root>
</mxGraphModel>
</diagram>
<diagram id="4Sb7mgJDpsadGym-U4wz" name="Echanges">
<mxGraphModel dx="1195" dy="683" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
<root>
<mxCell id="0" />
<mxCell id="1" parent="0" />
<mxCell id="b_xV4iUWIxmdZCAYY4YR-1" value="" style="html=1;shadow=0;dashed=0;align=center;verticalAlign=middle;shape=mxgraph.arrows2.arrow;dy=0;dx=10;notch=0;" parent="1" vertex="1">
<mxGeometry x="160" y="120" width="440" height="120" as="geometry" />
</mxCell>
<mxCell id="O_eM33N56VtHnDaMz1H4-1" value="ALICE" style="shape=umlLifeline;perimeter=lifelinePerimeter;whiteSpace=wrap;html=1;container=1;dropTarget=0;collapsible=0;recursiveResize=0;outlineConnect=0;portConstraint=eastwest;newEdgeStyle={&quot;curved&quot;:0,&quot;rounded&quot;:0};participant=umlEntity;strokeWidth=2;" parent="1" vertex="1">
<mxGeometry x="120" y="40" width="40" height="3110" as="geometry" />
</mxCell>
<mxCell id="O_eM33N56VtHnDaMz1H4-2" value="BOB" style="shape=umlLifeline;perimeter=lifelinePerimeter;whiteSpace=wrap;html=1;container=1;dropTarget=0;collapsible=0;recursiveResize=0;outlineConnect=0;portConstraint=eastwest;newEdgeStyle={&quot;curved&quot;:0,&quot;rounded&quot;:0};participant=umlEntity;strokeWidth=2;" parent="1" vertex="1">
<mxGeometry x="690" y="40" width="40" height="3110" as="geometry" />
</mxCell>
<mxCell id="b_xV4iUWIxmdZCAYY4YR-2" value="PING" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=23;" parent="1" vertex="1">
<mxGeometry x="385" y="65" width="80" height="40" as="geometry" />
</mxCell>
<mxCell id="n3lF8vaYaHAhAfaeaFZn-1" value="Nonce" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="180" y="130" width="105" height="80" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="n3lF8vaYaHAhAfaeaFZn-6" value="&lt;div&gt;sha256 (&lt;/div&gt;&lt;div&gt;numéro alice +&lt;/div&gt;&lt;div&gt;numéro bob +&lt;/div&gt;&lt;div&gt;timestamp +&lt;/div&gt;&lt;div&gt;random&lt;br&gt;&lt;/div&gt;&lt;div&gt;) / ~2 (left part)&lt;br&gt;&lt;/div&gt;" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=7;" parent="n3lF8vaYaHAhAfaeaFZn-1" vertex="1">
<mxGeometry y="25" width="100" height="55" as="geometry" />
</mxCell>
<mxCell id="n3lF8vaYaHAhAfaeaFZn-2" value="Version" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="305" y="130" width="58.75" height="80" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-1" value="(0-128)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="n3lF8vaYaHAhAfaeaFZn-2">
<mxGeometry x="3.75" y="30" width="51.25" height="25" as="geometry" />
</mxCell>
<mxCell id="n3lF8vaYaHAhAfaeaFZn-4" value="Checksum" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="455" y="130" width="90" height="80" as="geometry" />
</mxCell>
<mxCell id="n3lF8vaYaHAhAfaeaFZn-7" value="CRC-32" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="n3lF8vaYaHAhAfaeaFZn-4" vertex="1">
<mxGeometry x="15" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-1" value="" style="html=1;shadow=0;dashed=0;align=center;verticalAlign=middle;shape=mxgraph.arrows2.arrow;dy=0;dx=10;notch=0;rotation=-180;" parent="1" vertex="1">
<mxGeometry x="280" y="280" width="410" height="190" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-2" value="Timestamp" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="300" y="290" width="90" height="60" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-3" value="timestamp" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=10;strokeWidth=1;" parent="XvZTtdEB18xY6m2a5fJO-2" vertex="1">
<mxGeometry x="11.25" y="27.5" width="67.5" height="25" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-4" value="Version" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="405.63" y="290" width="58.75" height="60" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-11" value="0" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="XvZTtdEB18xY6m2a5fJO-4" vertex="1">
<mxGeometry y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-5" value="Checksum" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="590" y="380" width="85" height="60" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-6" value="CRC-32" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="XvZTtdEB18xY6m2a5fJO-5" vertex="1">
<mxGeometry x="12.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-9" value="Answer" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="482.5" y="290" width="57.5" height="60" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-10" value="&lt;div&gt;YES&lt;/div&gt;&lt;div&gt;NO&lt;br&gt;&lt;/div&gt;" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="484.38" y="315" width="53.75" height="30" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-13" value="HANDSHAKE" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=23;" parent="1" vertex="1">
<mxGeometry x="350" y="510" width="170" height="40" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-14" value="" style="html=1;shadow=0;dashed=0;align=center;verticalAlign=middle;shape=mxgraph.arrows2.arrow;dy=0;dx=10;notch=0;" parent="1" vertex="1">
<mxGeometry x="160" y="570" width="410" height="220" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-15" value="Clé éphémère" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="170" y="580" width="105" height="80" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-13" value="Clé (publique) générée aléatoirement" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=11;" parent="XvZTtdEB18xY6m2a5fJO-15" vertex="1">
<mxGeometry y="30" width="100" height="40" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-17" value="Signature" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="285" y="580" width="105" height="80" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-14" value="PubkeyFixe. sign(clé éphémère)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="XvZTtdEB18xY6m2a5fJO-17" vertex="1">
<mxGeometry y="20" width="100" height="60" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-18" value="Checksum" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="486.25" y="580" width="65" height="80" as="geometry" />
</mxCell>
<mxCell id="XvZTtdEB18xY6m2a5fJO-19" value="CRC-32" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="XvZTtdEB18xY6m2a5fJO-18" vertex="1">
<mxGeometry x="2.5" y="30" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-15" value="PFS" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="402.81" y="580" width="71.88" height="80" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-16" value="hash( preuve de convo précédente)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=10;" parent="pP7SjZfcCiBg3d1TCkzP-15" vertex="1">
<mxGeometry x="6.57" y="30" width="60" height="40" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-17" value="" style="html=1;shadow=0;dashed=0;align=center;verticalAlign=middle;shape=mxgraph.arrows2.arrow;dy=0;dx=10;notch=0;rotation=-180;" parent="1" vertex="1">
<mxGeometry x="285" y="830" width="410" height="180" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-43" value="Clé éphémère" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="305" y="840" width="105" height="80" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-44" value="Clé (publique) générée aléatoirement" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=11;" parent="pP7SjZfcCiBg3d1TCkzP-43" vertex="1">
<mxGeometry y="30" width="100" height="40" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-45" value="Signature" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="420" y="840" width="105" height="80" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-46" value="PubkeyFixe. sign(clé éphémère)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="pP7SjZfcCiBg3d1TCkzP-45" vertex="1">
<mxGeometry y="20" width="100" height="60" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-47" value="Checksum" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="621.25" y="840" width="65" height="80" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-48" value="CRC-32" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="pP7SjZfcCiBg3d1TCkzP-47" vertex="1">
<mxGeometry x="2.5" y="30" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-49" value="PFS" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="537.81" y="840" width="71.88" height="80" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-50" value="hash( preuve de convo précédente )" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=10;" parent="pP7SjZfcCiBg3d1TCkzP-49" vertex="1">
<mxGeometry x="6.57" y="30" width="60" height="40" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-54" value="Timestamp" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="182.5" y="690" width="80" height="70" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-55" value="timestamp" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=10;strokeWidth=1;" parent="pP7SjZfcCiBg3d1TCkzP-54" vertex="1">
<mxGeometry x="6.25" y="32.5" width="67.5" height="25" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-56" value="Timestamp" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="606.25" y="930" width="80" height="70" as="geometry">
<mxRectangle x="210" y="130" width="80" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-57" value="timestamp" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontSize=10;strokeWidth=1;" parent="pP7SjZfcCiBg3d1TCkzP-56" vertex="1">
<mxGeometry x="6.25" y="32.5" width="67.5" height="25" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-58" value="" style="html=1;shadow=0;dashed=0;align=center;verticalAlign=middle;shape=mxgraph.arrows2.arrow;dy=0;dx=10;notch=0;" parent="1" vertex="1">
<mxGeometry x="160" y="1160" width="450" height="200" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-59" value="ENCRYPTED COMS" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=23;" parent="1" vertex="1">
<mxGeometry x="305" y="1100" width="240" height="40" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-60" value="129b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="200" y="210" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-61" value="7b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="303.75" y="210" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-62" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="470" y="210" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-63" value="= 172b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="530" y="210" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-66" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="313" y="350" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-67" value="7b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="406.75" y="350" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-68" value="1b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="479.25" y="350" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-69" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="600.5" y="440" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-70" value="= 76b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="426.25" y="420" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-71" value="264b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="193" y="660" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-72" value="512b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="307.5" y="660" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-73" value="256b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="409.38" y="660" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-74" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="488.75" y="660" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-75" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="192.5" y="760" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-76" value="=1096b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="315" y="750" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pP7SjZfcCiBg3d1TCkzP-77" value="=1096b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="327.5" y="970" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-1" value="CRC ?" style="swimlane;whiteSpace=wrap;html=1;fillColor=#008a00;fontColor=#ffffff;strokeColor=#005700;" parent="1" vertex="1">
<mxGeometry x="375" y="1270" width="63.25" height="60" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-2" value="CRC-32" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="_H5URFloX_BVB2BL7kO6-1" vertex="1">
<mxGeometry x="1.6199999999999992" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-3" value="Flag" style="swimlane;whiteSpace=wrap;html=1;fillColor=#008a00;fontColor=#ffffff;strokeColor=#005700;" parent="1" vertex="1">
<mxGeometry x="180" y="1170" width="65" height="60" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-4" value="To determine" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="_H5URFloX_BVB2BL7kO6-3" vertex="1">
<mxGeometry x="2.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-5" value="nbretry" style="swimlane;whiteSpace=wrap;html=1;" parent="1" vertex="1">
<mxGeometry x="344.38" y="1170" width="65" height="60" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-6" value="y" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="_H5URFloX_BVB2BL7kO6-5" vertex="1">
<mxGeometry x="2.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-7" value="msg_len" style="swimlane;whiteSpace=wrap;html=1;fillColor=#008a00;fontColor=#ffffff;strokeColor=#005700;" parent="1" vertex="1">
<mxGeometry x="262.5" y="1170" width="65" height="60" as="geometry">
<mxRectangle x="262.5" y="1170" width="90" height="30" as="alternateBounds" />
</mxGeometry>
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-8" value="XXX" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="_H5URFloX_BVB2BL7kO6-7" vertex="1">
<mxGeometry x="2.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-9" value="msg" style="swimlane;whiteSpace=wrap;html=1;fillColor=#0050ef;fontColor=#ffffff;strokeColor=#001DBC;" parent="1" vertex="1">
<mxGeometry x="187.5" y="1270" width="65" height="60" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-10" value="BBB" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="_H5URFloX_BVB2BL7kO6-9" vertex="1">
<mxGeometry x="2.5" y="30" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-11" value="16b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="180" y="1230" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-12" value="8b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="349.38" y="1230" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-13" value="16b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="267.5" y="1230" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-14" value="96b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="510" y="1230" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-15" value="32b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="379.12" y="1330" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="_H5URFloX_BVB2BL7kO6-16" value="= (180b ~ 212b) + yyy" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" parent="1" vertex="1">
<mxGeometry x="465" y="1285" width="130" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-2" value="Cypher" style="swimlane;whiteSpace=wrap;html=1;" vertex="1" parent="1">
<mxGeometry x="375" y="130" width="58.75" height="80" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-3" value="(0-16)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="pWkGvNQAXuiST1IiWYlx-2">
<mxGeometry x="3.75" y="30" width="51.25" height="25" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-4" value="4b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
<mxGeometry x="375" y="210" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-5" value="Cypher" style="swimlane;whiteSpace=wrap;html=1;" vertex="1" parent="1">
<mxGeometry x="600" y="290" width="58.75" height="60" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-6" value="(0-16)" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="pWkGvNQAXuiST1IiWYlx-5">
<mxGeometry x="3.75" y="30" width="51.25" height="25" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-7" value="4b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
<mxGeometry x="601.88" y="350" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-8" value="status" style="swimlane;whiteSpace=wrap;html=1;" vertex="1" parent="1">
<mxGeometry x="425" y="1170" width="65" height="60" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-9" value="CRC ?" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="pWkGvNQAXuiST1IiWYlx-8">
<mxGeometry x="2.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-10" value="4b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
<mxGeometry x="428.75" y="1230" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-11" value="iv" style="swimlane;whiteSpace=wrap;html=1;" vertex="1" parent="1">
<mxGeometry x="505" y="1170" width="65" height="60" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-12" value="random&lt;div&gt;(+Z)&lt;/div&gt;" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="pWkGvNQAXuiST1IiWYlx-11">
<mxGeometry x="2.5" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-13" value="BBB b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
<mxGeometry x="193" y="1330" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-14" value="MAC" style="swimlane;whiteSpace=wrap;html=1;fillColor=#008a00;fontColor=#ffffff;strokeColor=#005700;" vertex="1" parent="1">
<mxGeometry x="286.13" y="1270" width="63.25" height="60" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-15" value="AEAD" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="pWkGvNQAXuiST1IiWYlx-14">
<mxGeometry x="1.6199999999999992" y="25" width="60" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-16" value="128b" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
<mxGeometry x="290.25" y="1330" width="55" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-17" value="Green = clear data" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fillColor=#008a00;fontColor=#ffffff;strokeColor=#005700;" vertex="1" parent="1">
<mxGeometry x="10" y="1170" width="110" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-18" value="&lt;font style=&quot;color: light-dark(rgb(0, 0, 0), rgb(255, 255, 255));&quot;&gt;White = additional data&lt;/font&gt;" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fillColor=none;strokeColor=light-dark(#6C8EBF,#FFFFFF);" vertex="1" parent="1">
<mxGeometry y="1220" width="130" height="30" as="geometry" />
</mxCell>
<mxCell id="pWkGvNQAXuiST1IiWYlx-19" value="Blue = encrypted data" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fillColor=#0050ef;fontColor=#ffffff;strokeColor=#001DBC;" vertex="1" parent="1">
<mxGeometry x="10" y="1270" width="110" height="30" as="geometry" />
</mxCell>
</root>
</mxGraphModel>
</diagram>
</mxfile>

View File

@ -0,0 +1,119 @@
# Voice-over-GSM Protocol Implementation
This implementation provides encrypted voice communication over standard GSM voice channels without requiring CSD/HSCSD.
## Architecture
### 1. Voice Codec (`voice_codec.py`)
- **Codec2Wrapper**: Simulates Codec2 compression
- Supports multiple bitrates (700-3200 bps)
- Default: 1200 bps for GSM robustness
- 40ms frames (48 bits/frame at 1200 bps)
- **FSKModem**: 4-FSK modulation for voice channels (a minimal mapping sketch follows this section)
- Frequency band: 300-3400 Hz (GSM compatible)
- Symbol rate: 600 baud
- 4 frequencies: 600, 1200, 1800, 2400 Hz
- Preamble: 800 Hz for 100ms
- **VoiceProtocol**: Integration layer
- Manages codec and modem
- Handles encryption with ChaCha20-CTR
- Frame-based processing
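The following is a minimal, self-contained sketch of the 4-FSK mapping described above (2 bits per symbol onto the 600/1200/1800/2400 Hz tones at 600 baud). The 8 kHz sample rate, the integer samples-per-symbol rounding, and names such as `bits_to_symbols` and `modulate` are illustrative assumptions, not the actual `FSKModem` API.

```python
# Illustrative 4-FSK sketch; not the actual FSKModem implementation.
import numpy as np

SAMPLE_RATE = 8000                  # Hz (assumed narrowband telephony rate)
SYMBOL_RATE = 600                   # baud, per the description above
TONES = [600, 1200, 1800, 2400]     # Hz, one tone per 2-bit symbol

def bits_to_symbols(bits: str):
    """Group a '0'/'1' string into 2-bit symbols (values 0..3)."""
    return [int(bits[i:i + 2], 2) for i in range(0, len(bits) - 1, 2)]

def modulate(bits: str) -> np.ndarray:
    """Return PCM samples for the given bits using the 4-FSK tones."""
    spsym = round(SAMPLE_RATE / SYMBOL_RATE)   # real modems track fractional symbol timing
    t = np.arange(spsym) / SAMPLE_RATE
    tones = [np.sin(2 * np.pi * TONES[s] * t) for s in bits_to_symbols(bits)]
    return np.concatenate(tones) if tones else np.zeros(0)

# One 48-bit Codec2 frame becomes 24 symbols, i.e. roughly 40 ms of audio.
pcm = modulate("01" * 24)
```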
### 2. Protocol Messages (`messages.py`)
- **VoiceStart** (20 bytes): Initiates voice call
- Version, codec mode, FEC type
- Session ID (64 bits)
- Initial sequence number
- **VoiceAck** (16 bytes): Accepts/rejects call
- Status (accept/reject)
- Negotiated codec and FEC
- **VoiceEnd** (12 bytes): Terminates call
- Session ID for confirmation
- **VoiceSync** (20 bytes): Synchronization
- Sequence number and timestamp
- For jitter buffer management
### 3. Encryption (`encryption.py`)
- **ChaCha20-CTR**: Stream cipher for voice (per-frame nonce handling sketched after this list)
- No per-frame authentication overhead (one HMAC covers each second of frames)
- 12-byte nonce with frame counter
- Uses HKDF-derived key from main protocol
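As a hedged illustration of the frame-counter nonce described above, the sketch below derives a 12-byte per-frame nonce from an 8-byte session part plus a 32-bit frame counter, then expands it to the 16-byte nonce that the `cryptography` library's ChaCha20 primitive expects. The 8+4 split and the zero block-counter prefix are assumptions, not the actual `VoiceProtocol` layout.

```python
# Hedged sketch of per-frame ChaCha20-CTR nonce handling; layout is assumed.
import struct
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms

def frame_nonce(session_part: bytes, frame_counter: int) -> bytes:
    """12-byte protocol nonce: 8-byte session part || 32-bit frame counter."""
    assert len(session_part) == 8
    return session_part + struct.pack(">I", frame_counter)

def encrypt_frame(key: bytes, session_part: bytes, frame_counter: int, frame: bytes) -> bytes:
    # The cryptography library takes a 16-byte ChaCha20 nonce; prepend a zero
    # 32-bit block counter to the 12-byte protocol nonce.
    nonce16 = b"\x00" * 4 + frame_nonce(session_part, frame_counter)
    enc = Cipher(algorithms.ChaCha20(key, nonce16), mode=None,
                 backend=default_backend()).encryptor()
    return enc.update(frame) + enc.finalize()
```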
### 4. Protocol Integration (`protocol.py`)
- Voice session management
- Message handlers for all voice messages
- Methods:
- `start_voice_call()`: Initiate call
- `accept_voice_call()`: Accept incoming
- `end_voice_call()`: Terminate
- `send_voice_audio()`: Process audio
## Usage Example
```python
# After key exchange is complete
alice.start_voice_call(codec_mode=5, fec_type=0)
# Bob automatically accepts if in auto mode
# Or manually: bob.accept_voice_call(session_id, codec_mode, fec_type)
# Send audio
audio_samples = generate_audio() # 8kHz, 16-bit PCM
alice.send_voice_audio(audio_samples)
# End call
alice.end_voice_call()
```
## Key Features
1. **Codec2 @ 1200 bps**
- Optimal for GSM vocoder survival
- Intelligible but "robotic" quality
2. **4-FSK Modulation**
- Survives GSM/AMR/EVS vocoders
- 2400 baud with FEC
3. **ChaCha20-CTR Encryption**
- Low latency stream cipher
- Frame-based IV management
4. **Forward Error Correction**
- Repetition code (3x); see the sketch after this list
- Future: Convolutional or LDPC
5. **No Special Requirements**
- Works over standard voice calls
- Compatible with any phone
- Software-only solution
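A minimal sketch of the repetition (3x) FEC mentioned in point 4: each bit is transmitted three times and recovered by majority vote, so any single flipped bit per triple is corrected. Function names are illustrative only.

```python
def fec_encode(bits: str) -> str:
    """Repeat every bit three times."""
    return "".join(b * 3 for b in bits)

def fec_decode(coded: str) -> str:
    """Majority-vote each group of three received bits."""
    return "".join(
        "1" if coded[i:i + 3].count("1") >= 2 else "0"
        for i in range(0, len(coded) - 2, 3)
    )

assert fec_decode(fec_encode("1011")) == "1011"
assert fec_decode("110" "000" "111" "101") == "1011"   # one flip per triple is corrected
```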
## Testing
Run the test scripts:
- `test_voice_simple.py`: Basic voice call setup
- `test_voice_protocol.py`: Full test with audio simulation (requires numpy)
## Implementation Notes
1. Message disambiguation: VoiceStart sets the high bit of its flags field to distinguish it from VoiceSync (both are 20 bytes)
2. The actual Codec2 library would need to be integrated for production use
3. FEC implementation is simplified (repetition code) - production would use convolutional codes
4. Audio I/O integration needed for real voice calls
5. Jitter buffer and timing recovery needed for production
## Security Considerations
- Voice frames use ChaCha20-CTR without per-frame authentication
- HMAC computed over 1-second blocks for efficiency (sketched below)
- Session binding through encrypted session ID
- PFS maintained through main protocol key rotation
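For illustration, one possible shape of the per-second integrity check: a single HMAC-SHA256 over the concatenation of a second's worth of encrypted frames (about 25 frames at 40 ms). The key separation and exact framing are assumptions, not the implemented scheme.

```python
# Hedged sketch only; the real per-second HMAC framing may differ.
import hashlib
import hmac
from typing import List

def authenticate_second(hmac_key: bytes, encrypted_frames: List[bytes]) -> bytes:
    """Tag roughly one second (about 25) of encrypted voice frames."""
    return hmac.new(hmac_key, b"".join(encrypted_frames), hashlib.sha256).digest()

def verify_second(hmac_key: bytes, encrypted_frames: List[bytes], tag: bytes) -> bool:
    return hmac.compare_digest(authenticate_second(hmac_key, encrypted_frames), tag)
```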

View File

@ -0,0 +1,430 @@
import time
import threading
import queue
from typing import Optional, Dict, Any, List, Callable, Tuple
# ANSI colors for logging
RED = "\033[91m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
BLUE = "\033[94m"
RESET = "\033[0m"
class AutoModeConfig:
"""Configuration parameters for the automatic mode behavior."""
def __init__(self):
# Ping behavior
self.ping_response_accept = True # Whether to accept incoming pings
self.ping_auto_initiate = False # Whether to initiate pings when connected
self.ping_retry_count = 3 # Number of ping retries
self.ping_retry_delay = 5.0 # Seconds between ping retries
self.ping_timeout = 10.0 # Seconds to wait for ping response
self.preferred_cipher = 0 # 0=AES-GCM, 1=ChaCha20-Poly1305
# Handshake behavior
self.handshake_retry_count = 3 # Number of handshake retries
self.handshake_retry_delay = 5.0 # Seconds between handshake retries
self.handshake_timeout = 10.0 # Seconds to wait for handshake
# Messaging behavior
self.auto_message_enabled = False # Whether to auto-send messages
self.message_interval = 10.0 # Seconds between auto messages
self.message_content = "Hello, secure world!" # Default message
# General behavior
self.active_mode = False # If true, initiates protocol instead of waiting
class AutoMode:
"""
Manages automated behavior for the Icing protocol.
Handles automatic progression through the protocol stages:
1. Connection setup
2. Ping/discovery
3. Key exchange
4. Encrypted communication
"""
def __init__(self, protocol_interface):
"""
Initialize the AutoMode manager.
Args:
protocol_interface: An object implementing the required protocol methods
"""
self.protocol = protocol_interface
self.config = AutoModeConfig()
self.active = False
self.state = "idle"
# Message queue for automated sending
self.message_queue = queue.Queue()
# Tracking variables
self.ping_attempts = 0
self.handshake_attempts = 0
self.last_action_time = 0
self.timer_tasks = [] # List of active timer tasks (for cleanup)
def start(self):
"""Start the automatic mode."""
if self.active:
return
self.active = True
self.state = "idle"
self.ping_attempts = 0
self.handshake_attempts = 0
self.last_action_time = time.time()
self._log_info("Automatic mode started")
# Start in active mode if configured
if self.config.active_mode and self.protocol.connections:
self._start_ping_sequence()
def stop(self):
"""Stop the automatic mode and clean up any pending tasks."""
if not self.active:
return
# Cancel any pending timers
for timer in self.timer_tasks:
if timer.is_alive():
timer.cancel()
self.timer_tasks = []
self.active = False
self.state = "idle"
self._log_info("Automatic mode stopped")
def handle_connection_established(self):
"""Called when a new connection is established."""
if not self.active:
return
self._log_info("Connection established")
# If in active mode, start pinging
if self.config.active_mode:
self._start_ping_sequence()
def handle_ping_received(self, index: int):
"""
Handle a received ping request.
Args:
index: Index of the ping request in the protocol's inbound message queue
"""
if not self.active or not self._is_valid_message_index(index):
return
self._log_info(f"Ping request received (index={index})")
# Automatically respond to ping if configured to accept
if self.config.ping_response_accept:
self._log_info(f"Auto-responding to ping with accept={self.config.ping_response_accept}")
try:
# Schedule the response with a small delay to simulate real behavior
timer = threading.Timer(0.5, self._respond_to_ping, args=[index])
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
except Exception as e:
self._log_error(f"Failed to auto-respond to ping: {e}")
def handle_ping_response_received(self, accepted: bool):
"""
Handle a received ping response.
Args:
accepted: Whether the ping was accepted
"""
if not self.active:
return
self.ping_attempts = 0 # Reset ping attempts counter
if accepted:
self._log_info("Ping accepted! Proceeding with handshake")
# Send handshake if not already done
if self.state != "handshake_sent":
self._ensure_ephemeral_keys()
self._start_handshake_sequence()
else:
self._log_info("Ping rejected by peer. Stopping auto-protocol sequence.")
self.state = "idle"
def handle_handshake_received(self, index: int):
"""
Handle a received handshake.
Args:
index: Index of the handshake in the protocol's inbound message queue
"""
if not self.active or not self._is_valid_message_index(index):
return
self._log_info(f"Handshake received (index={index})")
try:
# Ensure we have ephemeral keys
self._ensure_ephemeral_keys()
# Process the handshake (compute ECDH)
self.protocol.generate_ecdhe(index)
# Derive HKDF key
self.protocol.derive_hkdf()
# If we haven't sent our handshake yet, send it
if self.state != "handshake_sent":
timer = threading.Timer(0.5, self.protocol.send_handshake)
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
self.state = "handshake_sent"
else:
self.state = "key_exchange_complete"
# Start sending queued messages if auto messaging is enabled
if self.config.auto_message_enabled:
self._start_message_sequence()
except Exception as e:
self._log_error(f"Failed to process handshake: {e}")
def handle_encrypted_received(self, index: int):
"""
Handle a received encrypted message.
Args:
index: Index of the encrypted message in the protocol's inbound message queue
"""
if not self.active or not self._is_valid_message_index(index):
return
# Try to decrypt automatically
try:
plaintext = self.protocol.decrypt_received_message(index)
self._log_info(f"Auto-decrypted message: {plaintext}")
except Exception as e:
self._log_error(f"Failed to auto-decrypt message: {e}")
def queue_message(self, message: str):
"""
Add a message to the auto-send queue.
Args:
message: Message text to send
"""
self.message_queue.put(message)
self._log_info(f"Message queued for sending: {message}")
# If we're in the right state, start sending messages
if self.active and self.state == "key_exchange_complete" and self.config.auto_message_enabled:
self._process_message_queue()
def _start_ping_sequence(self):
"""Start the ping sequence to discover the peer."""
if self.ping_attempts >= self.config.ping_retry_count:
self._log_warning(f"Maximum ping attempts ({self.config.ping_retry_count}) reached")
self.state = "idle"
return
self.state = "pinging"
self.ping_attempts += 1
self._log_info(f"Sending ping request (attempt {self.ping_attempts}/{self.config.ping_retry_count})")
try:
self.protocol.send_ping_request(self.config.preferred_cipher)
self.last_action_time = time.time()
# Schedule next ping attempt if needed
timer = threading.Timer(
self.config.ping_retry_delay,
self._check_ping_response
)
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
except Exception as e:
self._log_error(f"Failed to send ping: {e}")
def _check_ping_response(self):
"""Check if we got a ping response, retry if not."""
if not self.active or self.state != "pinging":
return
# If we've waited long enough for a response, retry
if time.time() - self.last_action_time >= self.config.ping_timeout:
self._log_warning("No ping response received, retrying")
self._start_ping_sequence()
def _respond_to_ping(self, index: int):
"""
Respond to a ping request.
Args:
index: Index of the ping request in the inbound messages
"""
if not self.active or not self._is_valid_message_index(index):
return
try:
answer = 1 if self.config.ping_response_accept else 0
self.protocol.respond_to_ping(index, answer)
if answer == 1:
# If we accepted, we should expect a handshake
self.state = "accepted_ping"
self._ensure_ephemeral_keys()
# Set a timer to send our handshake if we don't receive one
timer = threading.Timer(
self.config.handshake_timeout,
self._check_handshake_received
)
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
self.last_action_time = time.time()
except Exception as e:
self._log_error(f"Failed to respond to ping: {e}")
def _check_handshake_received(self):
"""Check if we've received a handshake after accepting a ping."""
if not self.active or self.state != "accepted_ping":
return
# If we've waited long enough and haven't received a handshake, initiate one
if time.time() - self.last_action_time >= self.config.handshake_timeout:
self._log_warning("No handshake received after accepting ping, initiating handshake")
self._start_handshake_sequence()
def _start_handshake_sequence(self):
"""Start the handshake sequence."""
if self.handshake_attempts >= self.config.handshake_retry_count:
self._log_warning(f"Maximum handshake attempts ({self.config.handshake_retry_count}) reached")
self.state = "idle"
return
self.state = "handshake_sent"
self.handshake_attempts += 1
self._log_info(f"Sending handshake (attempt {self.handshake_attempts}/{self.config.handshake_retry_count})")
try:
self.protocol.send_handshake()
self.last_action_time = time.time()
# Schedule handshake retry check
timer = threading.Timer(
self.config.handshake_retry_delay,
self._check_handshake_response
)
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
except Exception as e:
self._log_error(f"Failed to send handshake: {e}")
def _check_handshake_response(self):
"""Check if we've completed the key exchange, retry handshake if not."""
if not self.active or self.state != "handshake_sent":
return
# If we've waited long enough for a response, retry
if time.time() - self.last_action_time >= self.config.handshake_timeout:
self._log_warning("No handshake response received, retrying")
self._start_handshake_sequence()
def _start_message_sequence(self):
"""Start the automated message sending sequence."""
if not self.config.auto_message_enabled:
return
self._log_info("Starting automated message sequence")
# Add the default message if queue is empty
if self.message_queue.empty():
self.message_queue.put(self.config.message_content)
# Start processing the queue
self._process_message_queue()
def _process_message_queue(self):
"""Process messages in the queue and send them."""
if not self.active or self.state != "key_exchange_complete" or not self.config.auto_message_enabled:
return
if not self.message_queue.empty():
message = self.message_queue.get()
self._log_info(f"Sending queued message: {message}")
try:
self.protocol.send_encrypted_message(message)
# Schedule next message send
timer = threading.Timer(
self.config.message_interval,
self._process_message_queue
)
timer.daemon = True
timer.start()
self.timer_tasks.append(timer)
except Exception as e:
self._log_error(f"Failed to send queued message: {e}")
# Put the message back in the queue
self.message_queue.put(message)
def _ensure_ephemeral_keys(self):
"""Ensure ephemeral keys are generated if needed."""
if not hasattr(self.protocol, 'ephemeral_pubkey') or self.protocol.ephemeral_pubkey is None:
self._log_info("Generating ephemeral keys")
self.protocol.generate_ephemeral_keys()
def _is_valid_message_index(self, index: int) -> bool:
"""
Check if a message index is valid in the protocol's inbound_messages queue.
Args:
index: The index to check
Returns:
bool: True if the index is valid, False otherwise
"""
if not hasattr(self.protocol, 'inbound_messages'):
self._log_error("Protocol has no inbound_messages attribute")
return False
if index < 0 or index >= len(self.protocol.inbound_messages):
self._log_error(f"Invalid message index: {index}")
return False
return True
# Helper methods for logging
def _log_info(self, message: str):
print(f"{BLUE}[AUTO]{RESET} {message}")
if hasattr(self, 'verbose_logging') and self.verbose_logging:
state_info = f"(state={self.state})"
if 'pinging' in self.state and hasattr(self, 'ping_attempts'):
state_info += f", attempts={self.ping_attempts}/{self.config.ping_retry_count}"
elif 'handshake' in self.state and hasattr(self, 'handshake_attempts'):
state_info += f", attempts={self.handshake_attempts}/{self.config.handshake_retry_count}"
print(f"{BLUE}[AUTO-DETAIL]{RESET} {state_info}")
def _log_warning(self, message: str):
print(f"{YELLOW}[AUTO-WARN]{RESET} {message}")
if hasattr(self, 'verbose_logging') and self.verbose_logging:
timer_info = f"Active timers: {len(self.timer_tasks)}"
print(f"{YELLOW}[AUTO-WARN-DETAIL]{RESET} {timer_info}")
def _log_error(self, message: str):
print(f"{RED}[AUTO-ERROR]{RESET} {message}")
if hasattr(self, 'verbose_logging') and self.verbose_logging:
print(f"{RED}[AUTO-ERROR-DETAIL]{RESET} Current state: {self.state}, Active: {self.active}")

View File

@ -0,0 +1,328 @@
import sys
import argparse
import shlex
from protocol import IcingProtocol
RED = "\033[91m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
BLUE = "\033[94m"
MAGENTA = "\033[95m"
CYAN = "\033[96m"
RESET = "\033[0m"
def print_help():
"""Display all available commands."""
print(f"\n{YELLOW}=== Available Commands ==={RESET}")
print(f"\n{CYAN}Basic Protocol Commands:{RESET}")
print(" help - Show this help message")
print(" peer_id <hex_pubkey> - Set peer identity public key")
print(" connect <port> - Connect to a peer at the specified port")
print(" show_state - Display current protocol state")
print(" exit - Exit the program")
print(f"\n{CYAN}Manual Protocol Operation:{RESET}")
print(" generate_ephemeral_keys - Generate ephemeral ECDH keys")
print(" send_ping [cipher] - Send PING request (cipher: 0=AES-GCM, 1=ChaCha20-Poly1305, default: 0)")
print(" respond_ping <index> <0|1> - Respond to a PING (0=reject, 1=accept)")
print(" send_handshake - Send handshake with ephemeral keys")
print(" generate_ecdhe <index> - Process handshake at specified index")
print(" derive_hkdf - Derive encryption key using HKDF")
print(" send_encrypted <plaintext> - Encrypt and send a message")
print(" decrypt <index> - Decrypt received message at index")
print(f"\n{CYAN}Automatic Mode Commands:{RESET}")
print(" auto start - Start automatic mode")
print(" auto stop - Stop automatic mode")
print(" auto status - Show current auto mode status and configuration")
print(" auto config <param> <value> - Configure auto mode parameters")
print(" auto config list - Show all configurable parameters")
print(" auto message <text> - Queue message for automatic sending")
print(" auto passive - Configure as passive peer (responds to pings but doesn't initiate)")
print(" auto active - Configure as active peer (initiates protocol)")
print(" auto log - Toggle detailed logging for auto mode")
print(f"\n{CYAN}Debugging Commands:{RESET}")
print(" debug_message <index> - Display detailed information about a message in the queue")
print(f"\n{CYAN}Legacy Commands:{RESET}")
print(" auto_responder <on|off> - Enable/disable legacy auto responder (deprecated)")
def main():
protocol = IcingProtocol()
print(f"{YELLOW}\n======================================")
print(" Icing Protocol - Secure Communication ")
print("======================================\n" + RESET)
print(f"Listening on port: {protocol.local_port}")
print(f"Your identity public key (hex): {protocol.identity_pubkey.hex()}")
print_help()
while True:
try:
line = input(f"{MAGENTA}Cmd>{RESET} ").strip()
except EOFError:
break
if not line:
continue
parts = shlex.split(line) # Handle quoted arguments properly
cmd = parts[0].lower()
try:
# Basic commands
if cmd == "exit":
protocol.stop()
break
elif cmd == "help":
print_help()
elif cmd == "show_state":
protocol.show_state()
elif cmd == "peer_id":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: peer_id <hex_pubkey>")
continue
try:
protocol.set_peer_identity(parts[1])
except ValueError as e:
print(f"{RED}[ERROR]{RESET} Invalid public key: {e}")
elif cmd == "connect":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: connect <port>")
continue
try:
port = int(parts[1])
protocol.connect_to_peer(port)
except ValueError:
print(f"{RED}[ERROR]{RESET} Invalid port number.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Connection failed: {e}")
# Manual protocol operation
elif cmd == "generate_ephemeral_keys":
protocol.generate_ephemeral_keys()
elif cmd == "send_ping":
# Optional cipher parameter (0 = AES-GCM, 1 = ChaCha20-Poly1305)
cipher = 0 # Default to AES-GCM
if len(parts) >= 2:
try:
cipher = int(parts[1])
if cipher not in (0, 1):
print(f"{YELLOW}[WARNING]{RESET} Unsupported cipher code {cipher}. Using AES-GCM (0).")
cipher = 0
except ValueError:
print(f"{YELLOW}[WARNING]{RESET} Invalid cipher code. Using AES-GCM (0).")
protocol.send_ping_request(cipher)
elif cmd == "send_handshake":
protocol.send_handshake()
elif cmd == "respond_ping":
if len(parts) != 3:
print(f"{RED}[ERROR]{RESET} Usage: respond_ping <index> <0|1>")
continue
try:
idx = int(parts[1])
answer = int(parts[2])
if answer not in (0, 1):
print(f"{RED}[ERROR]{RESET} Answer must be 0 (reject) or 1 (accept).")
continue
protocol.respond_to_ping(idx, answer)
except ValueError:
print(f"{RED}[ERROR]{RESET} Index and answer must be integers.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to respond to ping: {e}")
elif cmd == "generate_ecdhe":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: generate_ecdhe <index>")
continue
try:
idx = int(parts[1])
protocol.generate_ecdhe(idx)
except ValueError:
print(f"{RED}[ERROR]{RESET} Index must be an integer.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to process handshake: {e}")
elif cmd == "derive_hkdf":
try:
protocol.derive_hkdf()
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to derive HKDF key: {e}")
elif cmd == "send_encrypted":
if len(parts) < 2:
print(f"{RED}[ERROR]{RESET} Usage: send_encrypted <plaintext>")
continue
plaintext = " ".join(parts[1:])
try:
protocol.send_encrypted_message(plaintext)
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to send encrypted message: {e}")
elif cmd == "decrypt":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: decrypt <index>")
continue
try:
idx = int(parts[1])
protocol.decrypt_received_message(idx)
except ValueError:
print(f"{RED}[ERROR]{RESET} Index must be an integer.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to decrypt message: {e}")
# Debugging commands
elif cmd == "debug_message":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: debug_message <index>")
continue
try:
idx = int(parts[1])
protocol.debug_message(idx)
except ValueError:
print(f"{RED}[ERROR]{RESET} Index must be an integer.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Failed to debug message: {e}")
# Automatic mode commands
elif cmd == "auto":
if len(parts) < 2:
print(f"{RED}[ERROR]{RESET} Usage: auto <command> [options]")
print("Available commands: start, stop, status, config, message, passive, active")
continue
subcmd = parts[1].lower()
if subcmd == "start":
protocol.start_auto_mode()
print(f"{GREEN}[AUTO]{RESET} Automatic mode started")
elif subcmd == "stop":
protocol.stop_auto_mode()
print(f"{GREEN}[AUTO]{RESET} Automatic mode stopped")
elif subcmd == "status":
config = protocol.get_auto_mode_config()
print(f"{YELLOW}=== Auto Mode Status ==={RESET}")
print(f"Active: {protocol.auto_mode.active}")
print(f"State: {protocol.auto_mode.state}")
print(f"\n{YELLOW}--- Configuration ---{RESET}")
for key, value in vars(config).items():
print(f" {key}: {value}")
elif subcmd == "config":
if len(parts) < 3:
print(f"{RED}[ERROR]{RESET} Usage: auto config <param> <value> or auto config list")
continue
if parts[2].lower() == "list":
config = protocol.get_auto_mode_config()
print(f"{YELLOW}=== Auto Mode Configuration Parameters ==={RESET}")
for key, value in vars(config).items():
print(f" {key} ({type(value).__name__}): {value}")
continue
if len(parts) != 4:
print(f"{RED}[ERROR]{RESET} Usage: auto config <param> <value>")
continue
param = parts[2]
value_str = parts[3]
# Convert the string value to the appropriate type
config = protocol.get_auto_mode_config()
if not hasattr(config, param):
print(f"{RED}[ERROR]{RESET} Unknown parameter: {param}")
print("Use 'auto config list' to see all available parameters")
continue
current_value = getattr(config, param)
try:
if isinstance(current_value, bool):
if value_str.lower() in ("true", "yes", "on", "1"):
value = True
elif value_str.lower() in ("false", "no", "off", "0"):
value = False
else:
raise ValueError(f"Boolean value must be true/false/yes/no/on/off/1/0")
elif isinstance(current_value, int):
value = int(value_str)
elif isinstance(current_value, float):
value = float(value_str)
elif isinstance(current_value, str):
value = value_str
else:
value = value_str # Default to string
protocol.configure_auto_mode(**{param: value})
print(f"{GREEN}[AUTO]{RESET} Set {param} = {value}")
except ValueError as e:
print(f"{RED}[ERROR]{RESET} Invalid value for {param}: {e}")
elif subcmd == "message":
if len(parts) < 3:
print(f"{RED}[ERROR]{RESET} Usage: auto message <text>")
continue
message = " ".join(parts[2:])
protocol.queue_auto_message(message)
print(f"{GREEN}[AUTO]{RESET} Message queued for sending: {message}")
elif subcmd == "passive":
# Configure as passive peer (responds but doesn't initiate)
protocol.configure_auto_mode(
ping_response_accept=True,
ping_auto_initiate=False,
active_mode=False
)
print(f"{GREEN}[AUTO]{RESET} Configured as passive peer")
elif subcmd == "active":
# Configure as active peer (initiates protocol)
protocol.configure_auto_mode(
ping_response_accept=True,
ping_auto_initiate=True,
active_mode=True
)
print(f"{GREEN}[AUTO]{RESET} Configured as active peer")
else:
print(f"{RED}[ERROR]{RESET} Unknown auto mode command: {subcmd}")
print("Available commands: start, stop, status, config, message, passive, active")
# Legacy commands
elif cmd == "auto_responder":
if len(parts) != 2:
print(f"{RED}[ERROR]{RESET} Usage: auto_responder <on|off>")
continue
val = parts[1].lower()
if val not in ("on", "off"):
print(f"{RED}[ERROR]{RESET} Value must be 'on' or 'off'.")
continue
protocol.enable_auto_responder(val == "on")
print(f"{YELLOW}[WARNING]{RESET} Using legacy auto responder. Consider using 'auto' commands instead.")
else:
print(f"{RED}[ERROR]{RESET} Unknown command: {cmd}")
print("Type 'help' for a list of available commands.")
except Exception as e:
print(f"{RED}[ERROR]{RESET} Command failed: {e}")
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print("\nExiting...")
except Exception as e:
print(f"{RED}[FATAL ERROR]{RESET} {e}")
sys.exit(1)

View File

@ -0,0 +1,165 @@
import os
from typing import Tuple
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec, utils
from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature
def generate_identity_keys() -> Tuple[ec.EllipticCurvePrivateKey, bytes]:
"""
Generate an ECDSA (P-256) identity key pair.
Returns:
Tuple containing:
- private_key: EllipticCurvePrivateKey object
- public_key_bytes: Raw x||y format (64 bytes, 512 bits)
"""
private_key = ec.generate_private_key(ec.SECP256R1())
public_numbers = private_key.public_key().public_numbers()
x_bytes = public_numbers.x.to_bytes(32, byteorder='big')
y_bytes = public_numbers.y.to_bytes(32, byteorder='big')
pubkey_bytes = x_bytes + y_bytes # 64 bytes total
return private_key, pubkey_bytes
def load_peer_identity_key(pubkey_bytes: bytes) -> ec.EllipticCurvePublicKey:
"""
Convert a raw public key (64 bytes, x||y format) to a cryptography public key object.
Args:
pubkey_bytes: Raw 64-byte public key (x||y format)
Returns:
EllipticCurvePublicKey object
Raises:
ValueError: If the pubkey_bytes is not exactly 64 bytes
"""
if len(pubkey_bytes) != 64:
raise ValueError("Peer identity pubkey must be exactly 64 bytes (x||y).")
x_int = int.from_bytes(pubkey_bytes[:32], byteorder='big')
y_int = int.from_bytes(pubkey_bytes[32:], byteorder='big')
public_numbers = ec.EllipticCurvePublicNumbers(x_int, y_int, ec.SECP256R1())
return public_numbers.public_key()
def sign_data(private_key: ec.EllipticCurvePrivateKey, data: bytes) -> bytes:
"""
Sign data with ECDSA using a P-256 private key.
Args:
private_key: EllipticCurvePrivateKey for signing
data: Bytes to sign
Returns:
DER-encoded signature (variable length, up to ~70-72 bytes)
"""
signature = private_key.sign(data, ec.ECDSA(hashes.SHA256()))
return signature
def verify_signature(public_key: ec.EllipticCurvePublicKey, signature: bytes, data: bytes) -> bool:
"""
Verify a DER-encoded ECDSA signature.
Args:
public_key: EllipticCurvePublicKey for verification
signature: DER-encoded signature
data: Original signed data
Returns:
True if signature is valid, False otherwise
"""
try:
public_key.verify(signature, data, ec.ECDSA(hashes.SHA256()))
return True
except InvalidSignature:
return False
def get_ephemeral_keypair() -> Tuple[ec.EllipticCurvePrivateKey, bytes]:
"""
Generate an ephemeral ECDH key pair (P-256).
Returns:
Tuple containing:
- private_key: EllipticCurvePrivateKey object
- pubkey_bytes: Raw x||y format (64 bytes, 512 bits)
"""
private_key = ec.generate_private_key(ec.SECP256R1())
numbers = private_key.public_key().public_numbers()
x_bytes = numbers.x.to_bytes(32, 'big')
y_bytes = numbers.y.to_bytes(32, 'big')
return private_key, x_bytes + y_bytes # 64 bytes total
def compute_ecdh_shared_key(private_key: ec.EllipticCurvePrivateKey, peer_pubkey_bytes: bytes) -> bytes:
"""
Compute a shared secret using ECDH.
Args:
private_key: Local ECDH private key
peer_pubkey_bytes: Peer's ephemeral public key (64 bytes, raw x||y format)
Returns:
Shared secret bytes
Raises:
ValueError: If peer_pubkey_bytes is not 64 bytes
"""
if len(peer_pubkey_bytes) != 64:
raise ValueError("Peer public key must be 64 bytes (x||y format)")
x_int = int.from_bytes(peer_pubkey_bytes[:32], 'big')
y_int = int.from_bytes(peer_pubkey_bytes[32:], 'big')
# Create public key object from raw components
peer_public_numbers = ec.EllipticCurvePublicNumbers(x_int, y_int, ec.SECP256R1())
peer_public_key = peer_public_numbers.public_key()
# Perform key exchange
shared_key = private_key.exchange(ec.ECDH(), peer_public_key)
return shared_key
def der_to_raw(der_sig: bytes) -> bytes:
"""
Convert a DER-encoded ECDSA signature to a raw 64-byte signature (r||s).
Args:
der_sig: DER-encoded signature
Returns:
Raw 64-byte signature (r||s format), with each component padded to 32 bytes
"""
r, s = decode_dss_signature(der_sig)
r_bytes = r.to_bytes(32, byteorder='big')
s_bytes = s.to_bytes(32, byteorder='big')
return r_bytes + s_bytes
def raw_signature_to_der(raw_sig: bytes) -> bytes:
"""
Convert a raw signature (64 bytes, concatenated r||s) to DER-encoded signature.
Args:
raw_sig: Raw 64-byte signature (r||s format)
Returns:
DER-encoded signature
Raises:
ValueError: If raw_sig is not 64 bytes
"""
if len(raw_sig) != 64:
raise ValueError("Raw signature must be 64 bytes (r||s).")
r = int.from_bytes(raw_sig[:32], 'big')
s = int.from_bytes(raw_sig[32:], 'big')
return encode_dss_signature(r, s)

View File

@ -0,0 +1,307 @@
import os
import struct
from typing import Optional, Tuple
from cryptography.hazmat.primitives.ciphers.aead import AESGCM, ChaCha20Poly1305
class MessageHeader:
"""
Header of an encrypted message (18 bytes total):
Clear Text Section (4 bytes):
- flag: 16 bits (0xBEEF by default)
- data_len: 16 bits (length of encrypted payload excluding tag)
Associated Data (14 bytes):
- retry: 8 bits (retry counter)
- connection_status: 4 bits (e.g., CRC required) + 4 bits padding
- iv/messageID: 96 bits (12 bytes)
"""
def __init__(self, flag: int, data_len: int, retry: int, connection_status: int, iv: bytes):
if not (0 <= flag < 65536):
raise ValueError("Flag must fit in 16 bits (0..65535)")
if not (0 <= data_len < 65536):
raise ValueError("Data length must fit in 16 bits (0..65535)")
if not (0 <= retry < 256):
raise ValueError("Retry must fit in 8 bits (0..255)")
if not (0 <= connection_status < 16):
raise ValueError("Connection status must fit in 4 bits (0..15)")
if len(iv) != 12:
raise ValueError("IV must be 12 bytes (96 bits)")
self.flag = flag # 16 bits
self.data_len = data_len # 16 bits
self.retry = retry # 8 bits
self.connection_status = connection_status # 4 bits
self.iv = iv # 96 bits (12 bytes)
def pack(self) -> bytes:
"""Pack header into 18 bytes."""
# Pack flag and data_len (4 bytes)
header = struct.pack('>H H', self.flag, self.data_len)
# Pack retry and connection_status (2 bytes)
# connection_status in high 4 bits of second byte, 4 bits padding as zero
ad_byte = (self.connection_status & 0x0F) << 4
ad_packed = struct.pack('>B B', self.retry, ad_byte)
# Append IV (12 bytes)
return header + ad_packed + self.iv
def get_associated_data(self) -> bytes:
"""Get the associated data for AEAD encryption (retry, conn_status, iv)."""
# Pack retry and connection_status
ad_byte = (self.connection_status & 0x0F) << 4
ad_packed = struct.pack('>B B', self.retry, ad_byte)
# Append IV
return ad_packed + self.iv
@classmethod
def unpack(cls, data: bytes) -> 'MessageHeader':
"""Unpack 18 bytes into a MessageHeader object."""
if len(data) < 18:
raise ValueError(f"Header data too short: {len(data)} bytes, expected 18")
flag, data_len = struct.unpack('>H H', data[:4])
retry, ad_byte = struct.unpack('>B B', data[4:6])
connection_status = (ad_byte >> 4) & 0x0F
iv = data[6:18]
return cls(flag, data_len, retry, connection_status, iv)
class EncryptedMessage:
"""
Encrypted message packet format:
- Header (18 bytes):
* flag: 16 bits
* data_len: 16 bits
* retry: 8 bits
* connection_status: 4 bits (+ 4 bits padding)
* iv/messageID: 96 bits (12 bytes)
- Payload: variable length encrypted data
- Footer:
* Authentication tag: 128 bits (16 bytes)
* CRC32: 32 bits (4 bytes) - optional, based on connection_status
"""
def __init__(self, plaintext: bytes, key: bytes, flag: int = 0xBEEF,
retry: int = 0, connection_status: int = 0, iv: bytes = None,
cipher_type: int = 0):
self.plaintext = plaintext
self.key = key
self.flag = flag
self.retry = retry
self.connection_status = connection_status
self.iv = iv or generate_iv(initial=True)
self.cipher_type = cipher_type # 0 = AES-256-GCM, 1 = ChaCha20-Poly1305
# Will be set after encryption
self.ciphertext = None
self.tag = None
self.header = None
def encrypt(self) -> bytes:
"""Encrypt the plaintext and return the full encrypted message."""
# Create header with correct data_len (which will be set after encryption)
self.header = MessageHeader(
flag=self.flag,
data_len=0, # Will be updated after encryption
retry=self.retry,
connection_status=self.connection_status,
iv=self.iv
)
# Get associated data for AEAD
aad = self.header.get_associated_data()
# Encrypt using the appropriate cipher
if self.cipher_type == 0: # AES-256-GCM
cipher = AESGCM(self.key)
ciphertext_with_tag = cipher.encrypt(self.iv, self.plaintext, aad)
elif self.cipher_type == 1: # ChaCha20-Poly1305
cipher = ChaCha20Poly1305(self.key)
ciphertext_with_tag = cipher.encrypt(self.iv, self.plaintext, aad)
else:
raise ValueError(f"Unsupported cipher type: {self.cipher_type}")
# Extract ciphertext and tag
self.tag = ciphertext_with_tag[-16:]
self.ciphertext = ciphertext_with_tag[:-16]
# Update header with actual data length
self.header.data_len = len(self.ciphertext)
# Pack everything together
packed_header = self.header.pack()
# Check if CRC is required (based on connection_status)
if self.connection_status & 0x01: # Lowest bit indicates CRC required
import zlib
# Compute CRC32 of header + ciphertext + tag
crc = zlib.crc32(packed_header + self.ciphertext + self.tag) & 0xffffffff
crc_bytes = struct.pack('>I', crc)
return packed_header + self.ciphertext + self.tag + crc_bytes
else:
return packed_header + self.ciphertext + self.tag
@classmethod
def decrypt(cls, data: bytes, key: bytes, cipher_type: int = 0) -> Tuple[bytes, MessageHeader]:
"""
Decrypt an encrypted message and return the plaintext and header.
Args:
data: The full encrypted message
key: The encryption key
cipher_type: 0 for AES-256-GCM, 1 for ChaCha20-Poly1305
Returns:
Tuple of (plaintext, header)
"""
if len(data) < 18 + 16: # Header + minimum tag size
raise ValueError("Message too short")
# Extract header
header_bytes = data[:18]
header = MessageHeader.unpack(header_bytes)
# Get ciphertext and tag
data_len = header.data_len
ciphertext_start = 18
ciphertext_end = ciphertext_start + data_len
if ciphertext_end + 16 > len(data):
raise ValueError("Message length does not match header's data_len")
ciphertext = data[ciphertext_start:ciphertext_end]
tag = data[ciphertext_end:ciphertext_end + 16]
# Get associated data for AEAD
aad = header.get_associated_data()
# Combine ciphertext and tag for decryption
ciphertext_with_tag = ciphertext + tag
# Decrypt using the appropriate cipher
try:
if cipher_type == 0: # AES-256-GCM
cipher = AESGCM(key)
plaintext = cipher.decrypt(header.iv, ciphertext_with_tag, aad)
elif cipher_type == 1: # ChaCha20-Poly1305
cipher = ChaCha20Poly1305(key)
plaintext = cipher.decrypt(header.iv, ciphertext_with_tag, aad)
else:
raise ValueError(f"Unsupported cipher type: {cipher_type}")
return plaintext, header
except Exception as e:
raise ValueError(f"Decryption failed: {e}")
def generate_iv(initial: bool = False, previous_iv: bytes = None) -> bytes:
"""
Generate a 96-bit IV (12 bytes).
Args:
initial: If True, return a random IV
previous_iv: The previous IV to increment
Returns:
A new IV
"""
if initial or previous_iv is None:
return os.urandom(12) # 96 bits
else:
# Increment the previous IV by 1 modulo 2^96
iv_int = int.from_bytes(previous_iv, 'big')
iv_int = (iv_int + 1) % (1 << 96)
return iv_int.to_bytes(12, 'big')
# Convenience functions to match original API
def encrypt_message(plaintext: bytes, key: bytes, flag: int = 0xBEEF,
retry: int = 0, connection_status: int = 0,
iv: bytes = None, cipher_type: int = 0) -> bytes:
"""
Encrypt a message using the specified parameters.
Args:
plaintext: The data to encrypt
key: The 32-byte encryption key (both AES-256-GCM and ChaCha20-Poly1305 use 256-bit keys)
flag: 16-bit flag value (default: 0xBEEF)
retry: 8-bit retry counter
connection_status: 4-bit connection status
iv: Optional 96-bit IV (if None, a random one will be generated)
cipher_type: 0 for AES-256-GCM, 1 for ChaCha20-Poly1305
Returns:
The full encrypted message
"""
message = EncryptedMessage(
plaintext=plaintext,
key=key,
flag=flag,
retry=retry,
connection_status=connection_status,
iv=iv,
cipher_type=cipher_type
)
return message.encrypt()
def decrypt_message(message: bytes, key: bytes, cipher_type: int = 0) -> bytes:
"""
Decrypt a message.
Args:
message: The full encrypted message
key: The encryption key
cipher_type: 0 for AES-256-GCM, 1 for ChaCha20-Poly1305
Returns:
The decrypted plaintext
"""
plaintext, _ = EncryptedMessage.decrypt(message, key, cipher_type)
return plaintext
# ChaCha20-CTR functions for voice streaming (without authentication)
def chacha20_encrypt(plaintext: bytes, key: bytes, nonce: bytes) -> bytes:
"""
Encrypt plaintext using ChaCha20 in CTR mode (no authentication).
Args:
plaintext: Data to encrypt
key: 32-byte key
nonce: 16-byte nonce (for ChaCha20 in cryptography library)
Returns:
Ciphertext
"""
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
if len(key) != 32:
raise ValueError("ChaCha20 key must be 32 bytes")
if len(nonce) != 16:
raise ValueError("ChaCha20 nonce must be 16 bytes")
cipher = Cipher(
algorithms.ChaCha20(key, nonce),
mode=None,
backend=default_backend()
)
encryptor = cipher.encryptor()
return encryptor.update(plaintext) + encryptor.finalize()
def chacha20_decrypt(ciphertext: bytes, key: bytes, nonce: bytes) -> bytes:
"""
Decrypt ciphertext using ChaCha20 in CTR mode (no authentication).
Args:
ciphertext: Data to decrypt
key: 32-byte key
nonce: 12-byte nonce
Returns:
Plaintext
"""
# ChaCha20 is symmetrical - encryption and decryption are the same
return chacha20_encrypt(ciphertext, key, nonce)

View File

@ -0,0 +1,463 @@
import os
import struct
import time
import zlib
import hashlib
from typing import Tuple, Optional
def crc32_of(data: bytes) -> int:
"""
Compute CRC-32 of 'data'.
"""
return zlib.crc32(data) & 0xffffffff
# ---------------------------------------------------------------------------
# PING REQUEST (new format)
# Fields (in order):
# - session_nonce: 129 bits (from the top 129 bits of 17 random bytes)
# - version: 7 bits
# - cipher: 4 bits (0 = AES-256-GCM, 1 = ChaCha20-Poly1305; for now only 0 is used)
# - CRC: 32 bits
#
# Total bits: 129 + 7 + 4 + 32 = 172 bits. We pack into 22 bytes (176 bits) with 4 spare bits.
# ---------------------------------------------------------------------------
class PingRequest:
"""
PING REQUEST format (172 bits / 22 bytes):
- session_nonce: 129 bits (from top 129 bits of 17 random bytes)
- version: 7 bits
- cipher: 4 bits (0 = AES-256-GCM, 1 = ChaCha20-Poly1305)
- CRC: 32 bits
"""
def __init__(self, version: int, cipher: int, session_nonce: bytes = None):
if not (0 <= version < 128):
raise ValueError("Version must fit in 7 bits (0..127)")
if not (0 <= cipher < 16):
raise ValueError("Cipher must fit in 4 bits (0..15)")
self.version = version
self.cipher = cipher
# Generate session nonce if not provided
if session_nonce is None:
# Generate 17 random bytes
nonce_full = os.urandom(17)
# Use top 129 bits
nonce_int_full = int.from_bytes(nonce_full, 'big')
nonce_129_int = nonce_int_full >> 7 # drop lowest 7 bits
self.session_nonce = nonce_129_int.to_bytes(17, 'big')
else:
if len(session_nonce) != 17:
raise ValueError("Session nonce must be 17 bytes (136 bits)")
self.session_nonce = session_nonce
def serialize(self) -> bytes:
"""Serialize the ping request into a 22-byte packet."""
# Convert session_nonce to integer (129 bits)
nonce_int = int.from_bytes(self.session_nonce, 'big')
# Pack fields: shift nonce left by 11 bits, add version and cipher
partial_int = (nonce_int << 11) | (self.version << 4) | (self.cipher & 0x0F)
# This creates 129+7+4 = 140 bits; pack into 18 bytes
partial_bytes = partial_int.to_bytes(18, 'big')
# Compute CRC over these 18 bytes
cval = crc32_of(partial_bytes)
# Combine partial data with 32-bit CRC
final_int = (int.from_bytes(partial_bytes, 'big') << 32) | cval
return final_int.to_bytes(22, 'big')
@classmethod
def deserialize(cls, data: bytes) -> Optional['PingRequest']:
"""Deserialize a 22-byte packet into a PingRequest object."""
if len(data) != 22:
return None
# Extract 176-bit integer
final_int = int.from_bytes(data, 'big')
# Extract CRC and verify
crc_in = final_int & 0xffffffff
partial_int = final_int >> 32 # 140 bits
partial_bytes = partial_int.to_bytes(18, 'big')
crc_calc = crc32_of(partial_bytes)
if crc_calc != crc_in:
return None
# Extract fields
cipher = partial_int & 0x0F
version = (partial_int >> 4) & 0x7F
nonce_129_int = partial_int >> 11 # 129 bits
session_nonce = nonce_129_int.to_bytes(17, 'big')
return cls(version, cipher, session_nonce)
# ---------------------------------------------------------------------------
# PING RESPONSE (new format)
# Fields:
# - timestamp: 32 bits (we take the lower 32 bits of the time in ms)
# - version: 7 bits
# - cipher: 4 bits
# - answer: 1 bit
# - CRC: 32 bits
#
# Total bits: 32 + 7 + 4 + 1 + 32 = 76 bits; pack into 10 bytes (80 bits) with 4 spare bits.
# ---------------------------------------------------------------------------
class PingResponse:
"""
PING RESPONSE format (76 bits / 10 bytes):
- timestamp: 32 bits (milliseconds since epoch, lower 32 bits)
- version: 7 bits
- cipher: 4 bits
- answer: 1 bit (0 = no, 1 = yes)
- CRC: 32 bits
"""
def __init__(self, version: int, cipher: int, answer: int, timestamp: int = None):
if not (0 <= version < 128):
raise ValueError("Version must fit in 7 bits")
if not (0 <= cipher < 16):
raise ValueError("Cipher must fit in 4 bits")
if answer not in (0, 1):
raise ValueError("Answer must be 0 or 1")
self.version = version
self.cipher = cipher
self.answer = answer
self.timestamp = timestamp if timestamp is not None else (int(time.time() * 1000) & 0xffffffff)
def serialize(self) -> bytes:
"""Serialize the ping response into a 10-byte packet."""
# Pack timestamp, version, cipher, answer: 32+7+4+1 = 44 bits
# Shift left by 4 to put spare bits at the end
partial_val = (self.timestamp << (7+4+1)) | (self.version << (4+1)) | (self.cipher << 1) | self.answer
partial_val_shifted = partial_val << 4 # Add 4 spare bits at the end
partial_bytes = partial_val_shifted.to_bytes(6, 'big') # 6 bytes = 48 bits
# Compute CRC
cval = crc32_of(partial_bytes)
# Combine with CRC
final_val = (int.from_bytes(partial_bytes, 'big') << 32) | cval
return final_val.to_bytes(10, 'big')
@classmethod
def deserialize(cls, data: bytes) -> Optional['PingResponse']:
"""Deserialize a 10-byte packet into a PingResponse object."""
if len(data) != 10:
return None
# Extract 80-bit integer
final_int = int.from_bytes(data, 'big')
# Extract CRC and verify
crc_in = final_int & 0xffffffff
partial_int = final_int >> 32 # 48 bits
partial_bytes = partial_int.to_bytes(6, 'big')
crc_calc = crc32_of(partial_bytes)
if crc_calc != crc_in:
return None
# Extract fields (discard 4 spare bits)
partial_int >>= 4 # now 44 bits
answer = partial_int & 0x01
cipher = (partial_int >> 1) & 0x0F
version = (partial_int >> (1+4)) & 0x7F
timestamp = partial_int >> (1+4+7)
return cls(version, cipher, answer, timestamp)
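# Usage sketch: a PingResponse accepting the call, round-tripped through the 10-byte
# wire format described above.
def _demo_ping_response():
    resp = PingResponse(version=1, cipher=0, answer=1)
    packet = resp.serialize()
    assert len(packet) == 10
    parsed = PingResponse.deserialize(packet)
    assert parsed is not None
    assert (parsed.version, parsed.cipher, parsed.answer) == (1, 0, 1)
    assert parsed.timestamp == resp.timestamp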
# =============================================================================
# 3) Handshake
# - 32-bit timestamp
# - 64-byte ephemeral pubkey (raw x||y = 512 bits)
# - 64-byte ephemeral signature (raw r||s = 512 bits)
# - 32-byte PFS hash (256 bits)
# - 32-bit CRC
# => total 4 + 64 + 64 + 32 + 4 = 168 bytes = 1344 bits
# =============================================================================
class Handshake:
"""
HANDSHAKE format (1344 bits / 168 bytes):
- timestamp: 32 bits
- ephemeral_pubkey: 512 bits (64 bytes, raw x||y format)
- ephemeral_signature: 512 bits (64 bytes, raw r||s format)
- pfs_hash: 256 bits (32 bytes)
- CRC: 32 bits
"""
def __init__(self, ephemeral_pubkey: bytes, ephemeral_signature: bytes, pfs_hash: bytes, timestamp: int = None):
if len(ephemeral_pubkey) != 64:
raise ValueError("ephemeral_pubkey must be 64 bytes (raw x||y)")
if len(ephemeral_signature) != 64:
raise ValueError("ephemeral_signature must be 64 bytes (raw r||s)")
if len(pfs_hash) != 32:
raise ValueError("pfs_hash must be 32 bytes")
self.ephemeral_pubkey = ephemeral_pubkey
self.ephemeral_signature = ephemeral_signature
self.pfs_hash = pfs_hash
self.timestamp = timestamp if timestamp is not None else (int(time.time() * 1000) & 0xffffffff)
def serialize(self) -> bytes:
"""Serialize the handshake into a 168-byte packet."""
# Pack timestamp and other fields
partial = struct.pack("!I", self.timestamp) + self.ephemeral_pubkey + self.ephemeral_signature + self.pfs_hash
# Compute CRC
cval = crc32_of(partial)
# Append CRC
return partial + struct.pack("!I", cval)
@classmethod
def deserialize(cls, data: bytes) -> Optional['Handshake']:
"""Deserialize a 168-byte packet into a Handshake object."""
if len(data) != 168:
return None
# Extract and verify CRC
partial = data[:-4]
crc_in = struct.unpack("!I", data[-4:])[0]
crc_calc = crc32_of(partial)
if crc_calc != crc_in:
return None
# Extract fields
timestamp = struct.unpack("!I", partial[:4])[0]
ephemeral_pubkey = partial[4:4+64]
ephemeral_signature = partial[68:68+64]
pfs_hash = partial[132:132+32]
return cls(ephemeral_pubkey, ephemeral_signature, pfs_hash, timestamp)
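# Usage sketch: build a Handshake with placeholder key material (random bytes stand in
# for a real ephemeral public key and signature) and verify the 168-byte round trip.
def _demo_handshake():
    hs = Handshake(
        ephemeral_pubkey=os.urandom(64),
        ephemeral_signature=os.urandom(64),
        pfs_hash=b"\x00" * 32,  # first session: no previous shared secret
    )
    packet = hs.serialize()
    assert len(packet) == 168
    parsed = Handshake.deserialize(packet)
    assert parsed is not None and parsed.ephemeral_pubkey == hs.ephemeral_pubkey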
# =============================================================================
# 4) PFS Hash Helper
# If no previous session, return 32 zero bytes
# Otherwise, compute sha256(session_number || last_shared_secret).
# =============================================================================
def compute_pfs_hash(session_number: int, shared_secret_hex: str) -> bytes:
"""
Compute the PFS hash field for handshake messages:
- If no previous session (session_number < 0), return 32 zero bytes
- Otherwise, compute sha256(session_number || shared_secret)
"""
if session_number < 0:
return b"\x00" * 32
# Convert shared_secret_hex to raw bytes
secret_bytes = bytes.fromhex(shared_secret_hex)
# Pack session_number as 4 bytes
sn_bytes = struct.pack("!I", session_number)
# Compute hash
return hashlib.sha256(sn_bytes + secret_bytes).digest()
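# Usage sketch: the PFS hash is all zeros for the first session and binds the session
# number to the previous shared secret afterwards (the hex string here is a placeholder).
def _demo_pfs_hash():
    assert compute_pfs_hash(-1, "") == b"\x00" * 32
    digest = compute_pfs_hash(3, "ab" * 32)
    assert len(digest) == 32 and digest != b"\x00" * 32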
# Helper function for CRC32 calculations
def compute_crc32(data: bytes) -> int:
"""Compute CRC32 of data (for consistency with crc32_of)."""
return zlib.crc32(data) & 0xffffffff
# =============================================================================
# Voice Protocol Messages
# =============================================================================
class VoiceStart:
"""
Voice call initiation message (20 bytes).
Fields:
- version: 8 bits (protocol version)
- codec_mode: 8 bits (Codec2 mode)
- fec_type: 8 bits (0=repetition, 1=convolutional, 2=LDPC)
- flags: 8 bits (reserved for future use)
- session_id: 64 bits (unique voice session identifier)
- initial_sequence: 32 bits (starting sequence number)
- crc32: 32 bits
"""
def __init__(self, version: int = 0, codec_mode: int = 5, fec_type: int = 0,
flags: int = 0, session_id: int = None, initial_sequence: int = 0):
self.version = version
self.codec_mode = codec_mode
self.fec_type = fec_type
self.flags = flags | 0x80 # Set high bit to distinguish from VoiceSync
self.session_id = session_id if session_id is not None else int.from_bytes(os.urandom(8), 'big')
self.initial_sequence = initial_sequence
def serialize(self) -> bytes:
"""Serialize to 20 bytes."""
# Pack all fields except CRC
data = struct.pack('>BBBBQII',
self.version,
self.codec_mode,
self.fec_type,
self.flags,
self.session_id,
self.initial_sequence,
0 # CRC placeholder
)
# Calculate and append CRC
crc = compute_crc32(data[:-4])
return data[:-4] + struct.pack('>I', crc)
@classmethod
def deserialize(cls, data: bytes) -> Optional['VoiceStart']:
"""Deserialize from bytes."""
if len(data) != 20:
return None
try:
version, codec_mode, fec_type, flags, session_id, initial_seq, crc = struct.unpack('>BBBBQII', data)
# Verify CRC
expected_crc = compute_crc32(data[:-4])
if crc != expected_crc:
return None
return cls(version, codec_mode, fec_type, flags, session_id, initial_seq)
except struct.error:
return None
class VoiceAck:
"""
Voice call acknowledgment message (16 bytes).
Fields:
- version: 8 bits
- status: 8 bits (0=reject, 1=accept)
- codec_mode: 8 bits (negotiated codec mode)
- fec_type: 8 bits (negotiated FEC type)
- session_id: 64 bits (echo of received session_id)
- crc32: 32 bits
"""
def __init__(self, version: int = 0, status: int = 1, codec_mode: int = 5,
fec_type: int = 0, session_id: int = 0):
self.version = version
self.status = status
self.codec_mode = codec_mode
self.fec_type = fec_type
self.session_id = session_id
def serialize(self) -> bytes:
"""Serialize to 16 bytes."""
data = struct.pack('>BBBBQI',
self.version,
self.status,
self.codec_mode,
self.fec_type,
self.session_id,
0 # CRC placeholder
)
crc = compute_crc32(data[:-4])
return data[:-4] + struct.pack('>I', crc)
@classmethod
def deserialize(cls, data: bytes) -> Optional['VoiceAck']:
"""Deserialize from bytes."""
if len(data) != 16:
return None
try:
version, status, codec_mode, fec_type, session_id, crc = struct.unpack('>BBBBQI', data)
expected_crc = compute_crc32(data[:-4])
if crc != expected_crc:
return None
return cls(version, status, codec_mode, fec_type, session_id)
except struct.error:
return None
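# Usage sketch: the VoiceStart/VoiceAck exchange as caller and callee would use it; the
# callee echoes the session_id and the negotiated codec/FEC parameters.
def _demo_voice_negotiation():
    start = VoiceStart(codec_mode=5, fec_type=0)
    received_start = VoiceStart.deserialize(start.serialize())
    assert received_start is not None
    ack = VoiceAck(status=1, codec_mode=received_start.codec_mode,
                   fec_type=received_start.fec_type,
                   session_id=received_start.session_id)
    received_ack = VoiceAck.deserialize(ack.serialize())
    assert received_ack is not None
    assert received_ack.session_id == start.session_id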
class VoiceEnd:
"""
Voice call termination message (12 bytes).
Fields:
- session_id: 64 bits
- crc32: 32 bits
"""
def __init__(self, session_id: int):
self.session_id = session_id
def serialize(self) -> bytes:
"""Serialize to 12 bytes."""
data = struct.pack('>QI', self.session_id, 0)
crc = compute_crc32(data[:-4])
return data[:-4] + struct.pack('>I', crc)
@classmethod
def deserialize(cls, data: bytes) -> Optional['VoiceEnd']:
"""Deserialize from bytes."""
if len(data) != 12:
return None
try:
session_id, crc = struct.unpack('>QI', data)
expected_crc = compute_crc32(data[:-4])
if crc != expected_crc:
return None
return cls(session_id)
except struct.error:
return None
class VoiceSync:
"""
Voice synchronization frame (20 bytes).
Used for maintaining sync and providing timing information.
Fields:
- session_id: 64 bits
- sequence: 32 bits
- timestamp: 32 bits (milliseconds since voice start)
- crc32: 32 bits
"""
def __init__(self, session_id: int, sequence: int, timestamp: int):
self.session_id = session_id
self.sequence = sequence
self.timestamp = timestamp
def serialize(self) -> bytes:
"""Serialize to 20 bytes."""
data = struct.pack('>QIII', self.session_id, self.sequence, self.timestamp, 0)
crc = compute_crc32(data[:-4])
return data[:-4] + struct.pack('>I', crc)
@classmethod
def deserialize(cls, data: bytes) -> Optional['VoiceSync']:
"""Deserialize from bytes."""
if len(data) != 20:
return None
try:
session_id, sequence, timestamp, crc = struct.unpack('>QIII', data)
expected_crc = compute_crc32(data[:-4])
if crc != expected_crc:
return None
return cls(session_id, sequence, timestamp)
except struct.error:
return None
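# Usage sketch: sync frames carry the sequence number and elapsed time for a session,
# and a VoiceEnd with the same session_id tears the call down.
def _demo_voice_sync_and_end():
    session_id = int.from_bytes(os.urandom(8), 'big')
    sync = VoiceSync(session_id=session_id, sequence=42, timestamp=1680)
    parsed_sync = VoiceSync.deserialize(sync.serialize())
    assert parsed_sync is not None and parsed_sync.sequence == 42
    end = VoiceEnd(session_id=session_id)
    parsed_end = VoiceEnd.deserialize(end.serialize())
    assert parsed_end is not None and parsed_end.session_id == session_id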

File diff suppressed because it is too large

View File

@ -0,0 +1,100 @@
import socket
import threading
from typing import Callable
class PeerConnection:
"""
Represents a live, two-way connection to a peer.
We keep a socket open, read data in a background thread,
and can send data from the main thread at any time.
"""
def __init__(self, sock: socket.socket, on_data_received: Callable[['PeerConnection', bytes], None]):
self.sock = sock
self.on_data_received = on_data_received
self.alive = True
self.read_thread = threading.Thread(target=self.read_loop, daemon=True)
self.read_thread.start()
def read_loop(self):
while self.alive:
try:
data = self.sock.recv(4096)
if not data:
break
self.on_data_received(self, data)
except OSError:
break
self.alive = False
self.sock.close()
print("[PeerConnection] Connection closed.")
def send(self, data: bytes):
if not self.alive:
print("[PeerConnection.send] Cannot send, connection not alive.")
return
try:
self.sock.sendall(data)
except OSError:
print("[PeerConnection.send] Send failed, connection might be closed.")
self.alive = False
def close(self):
self.alive = False
try:
self.sock.shutdown(socket.SHUT_RDWR)
except OSError:
pass
self.sock.close()
class ServerListener(threading.Thread):
"""
A thread that listens on a given port. When a new client connects,
it creates a PeerConnection for that client.
"""
def __init__(self, host: str, port: int,
on_new_connection: Callable[[PeerConnection], None],
on_data_received: Callable[[PeerConnection, bytes], None]):
super().__init__(daemon=True)
self.host = host
self.port = port
self.on_new_connection = on_new_connection
self.on_data_received = on_data_received
self.server_socket = None
self.stop_event = threading.Event()
def run(self):
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_socket.bind((self.host, self.port))
self.server_socket.listen(5)
self.server_socket.settimeout(1.0)
print(f"[ServerListener] Listening on {self.host}:{self.port}")
while not self.stop_event.is_set():
try:
client_sock, addr = self.server_socket.accept()
print(f"[ServerListener] Accepted connection from {addr}")
conn = PeerConnection(client_sock, self.on_data_received)
self.on_new_connection(conn)
except socket.timeout:
pass
except OSError:
break
if self.server_socket:
self.server_socket.close()
def stop(self):
self.stop_event.set()
if self.server_socket:
self.server_socket.close()
def connect_to_peer(host: str, port: int,
on_data_received: Callable[[PeerConnection, bytes], None]) -> PeerConnection:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
print(f"[connect_to_peer] Connected to {host}:{port}")
conn = PeerConnection(sock, on_data_received)
return conn
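# Usage sketch: a localhost loopback of the primitives above. The port number is an
# arbitrary choice for this example.
if __name__ == "__main__":
    import time
    def echo_server(conn, data):
        conn.send(b"ack:" + data)
    def print_client(conn, data):
        print(f"[client] received {data!r}")
    peers = []
    listener = ServerListener("127.0.0.1", 50555,
                              on_new_connection=peers.append,
                              on_data_received=echo_server)
    listener.start()
    time.sleep(0.2)  # let the listener bind before connecting
    client = connect_to_peer("127.0.0.1", 50555, print_client)
    client.send(b"ping")
    time.sleep(0.5)  # wait for the echo to come back
    client.close()
    listener.stop()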

View File

@ -0,0 +1,716 @@
"""
Voice codec integration for encrypted voice over GSM.
Implements Codec2 compression with FSK modulation for transmitting
encrypted voice data over standard GSM voice channels.
"""
import array
import math
import struct
from typing import Optional, Tuple, List
from dataclasses import dataclass
from enum import IntEnum
try:
import numpy as np
HAS_NUMPY = True
except ImportError:
HAS_NUMPY = False
# ANSI colors
RED = "\033[91m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
BLUE = "\033[94m"
RESET = "\033[0m"
class Codec2Mode(IntEnum):
"""Codec2 bitrate modes."""
MODE_3200 = 0 # 3200 bps
MODE_2400 = 1 # 2400 bps
MODE_1600 = 2 # 1600 bps
MODE_1400 = 3 # 1400 bps
MODE_1300 = 4 # 1300 bps
MODE_1200 = 5 # 1200 bps (recommended for robustness)
MODE_700C = 6 # 700 bps
@dataclass
class Codec2Frame:
"""Represents a single Codec2 compressed voice frame."""
mode: Codec2Mode
bits: bytes
timestamp: float
frame_number: int
class Codec2Wrapper:
"""
Wrapper for Codec2 voice codec.
In production, this would use py_codec2 or ctypes bindings to libcodec2.
This is a simulation interface for protocol development.
"""
# Frame sizes in bits for each mode
FRAME_BITS = {
Codec2Mode.MODE_3200: 64,
Codec2Mode.MODE_2400: 48,
Codec2Mode.MODE_1600: 64,
Codec2Mode.MODE_1400: 56,
Codec2Mode.MODE_1300: 52,
Codec2Mode.MODE_1200: 48,
Codec2Mode.MODE_700C: 28
}
# Frame duration in ms
FRAME_MS = {
Codec2Mode.MODE_3200: 20,
Codec2Mode.MODE_2400: 20,
Codec2Mode.MODE_1600: 40,
Codec2Mode.MODE_1400: 40,
Codec2Mode.MODE_1300: 40,
Codec2Mode.MODE_1200: 40,
Codec2Mode.MODE_700C: 40
}
def __init__(self, mode: Codec2Mode = Codec2Mode.MODE_1200):
"""
Initialize Codec2 wrapper.
Args:
mode: Codec2 bitrate mode (default 1200 bps for robustness)
"""
self.mode = mode
self.frame_bits = self.FRAME_BITS[mode]
self.frame_bytes = (self.frame_bits + 7) // 8
self.frame_ms = self.FRAME_MS[mode]
self.frame_samples = int(8000 * self.frame_ms / 1000) # 8kHz sampling
self.frame_counter = 0
print(f"{GREEN}[CODEC2]{RESET} Initialized in mode {mode.name} "
f"({self.frame_bits} bits/frame, {self.frame_ms}ms duration)")
def encode(self, audio_samples) -> Optional[Codec2Frame]:
"""
Encode PCM audio samples to Codec2 frame.
Args:
audio_samples: PCM samples (8kHz, 16-bit signed)
Returns:
Codec2Frame or None if insufficient samples
"""
if len(audio_samples) < self.frame_samples:
return None
# In production: call codec2_encode(state, bits, samples)
# Simulation: create pseudo-compressed data
compressed = self._simulate_compression(audio_samples[:self.frame_samples])
frame = Codec2Frame(
mode=self.mode,
bits=compressed,
timestamp=self.frame_counter * self.frame_ms / 1000.0,
frame_number=self.frame_counter
)
self.frame_counter += 1
return frame
def decode(self, frame: Codec2Frame):
"""
Decode Codec2 frame to PCM audio samples.
Args:
frame: Codec2 compressed frame
Returns:
PCM samples (8kHz, 16-bit signed)
"""
if frame.mode != self.mode:
raise ValueError(f"Frame mode {frame.mode} doesn't match decoder mode {self.mode}")
# In production: call codec2_decode(state, samples, bits)
# Simulation: decompress to audio
return self._simulate_decompression(frame.bits)
def _simulate_compression(self, samples) -> bytes:
"""Simulate Codec2 compression (for testing)."""
# Convert to list if needed
if hasattr(samples, 'tolist'):
sample_list = samples.tolist()
elif hasattr(samples, '__iter__'):
sample_list = list(samples)
else:
sample_list = samples
# Extract basic features for simulation
if HAS_NUMPY and hasattr(samples, '__array__'):
# Convert to numpy array if needed
np_samples = np.asarray(samples, dtype=np.float32)
if len(np_samples) > 0:
mean_square = np.mean(np_samples ** 2)
energy = np.sqrt(mean_square) if not np.isnan(mean_square) else 0.0
zero_crossings = np.sum(np.diff(np.sign(np_samples)) != 0)
else:
energy = 0.0
zero_crossings = 0
else:
# Manual calculation without numpy
if sample_list and len(sample_list) > 0:
energy = math.sqrt(sum(s**2 for s in sample_list) / len(sample_list))
zero_crossings = sum(1 for i in range(1, len(sample_list))
if (sample_list[i-1] >= 0) != (sample_list[i] >= 0))
else:
energy = 0.0
zero_crossings = 0
# Pack into bytes (simplified)
# Ensure values are valid
energy_int = max(0, min(65535, int(energy)))
zc_int = max(0, min(65535, int(zero_crossings)))
data = struct.pack('<HH', energy_int, zc_int)
# Pad to expected frame size
data += b'\x00' * (self.frame_bytes - len(data))
return data[:self.frame_bytes]
def _simulate_decompression(self, compressed: bytes):
"""Simulate Codec2 decompression (for testing)."""
# Unpack features
if len(compressed) >= 4:
energy, zero_crossings = struct.unpack('<HH', compressed[:4])
else:
energy, zero_crossings = 1000, 100
# Generate synthetic speech-like signal
if HAS_NUMPY:
t = np.linspace(0, self.frame_ms/1000, self.frame_samples)
# Base frequency from zero crossings
freq = zero_crossings * 10 # Simplified mapping
# Generate harmonics
signal = np.zeros(self.frame_samples)
for harmonic in range(1, 4):
signal += np.sin(2 * np.pi * freq * harmonic * t) / harmonic
# Apply energy envelope
signal *= energy / 10000.0
# Convert to 16-bit PCM
return (signal * 32767).astype(np.int16)
else:
# Manual generation without numpy
samples = []
freq = zero_crossings * 10
for i in range(self.frame_samples):
t = i / 8000.0 # 8kHz sample rate
value = 0
for harmonic in range(1, 4):
value += math.sin(2 * math.pi * freq * harmonic * t) / harmonic
value *= energy / 10000.0
# Clamp to 16-bit range
sample = int(value * 32767)
sample = max(-32768, min(32767, sample))
samples.append(sample)
return array.array('h', samples)
class FSKModem:
"""
4-FSK modem for transmitting digital data over voice channels.
Designed to survive GSM/AMR/EVS vocoders.
"""
def __init__(self, sample_rate: int = 8000, baud_rate: int = 600):
"""
Initialize FSK modem.
Args:
sample_rate: Audio sample rate (Hz)
baud_rate: Symbol rate (baud)
"""
self.sample_rate = sample_rate
self.baud_rate = baud_rate
self.samples_per_symbol = int(sample_rate / baud_rate)
# 4-FSK frequencies (300-3400 Hz band)
self.frequencies = [
600, # 00
1200, # 01
1800, # 10
2400 # 11
]
# Preamble for synchronization (800 Hz, 100ms)
self.preamble_freq = 800
self.preamble_duration = 0.1 # seconds
print(f"{GREEN}[FSK]{RESET} Initialized 4-FSK modem "
f"({baud_rate} baud, frequencies: {self.frequencies})")
def modulate(self, data: bytes, add_preamble: bool = True):
"""
Modulate binary data to FSK audio signal.
Args:
data: Binary data to modulate
add_preamble: Whether to add synchronization preamble
Returns:
Audio signal (normalized float32 array or list)
"""
# Convert bytes to dibits (2-bit symbols)
symbols = []
for byte in data:
symbols.extend([
(byte >> 6) & 0x03,
(byte >> 4) & 0x03,
(byte >> 2) & 0x03,
byte & 0x03
])
# Generate audio signal
signal = []
# Add preamble
if add_preamble:
preamble_samples = int(self.preamble_duration * self.sample_rate)
if HAS_NUMPY:
t = np.arange(preamble_samples) / self.sample_rate
preamble = np.sin(2 * np.pi * self.preamble_freq * t)
signal.extend(preamble)
else:
for i in range(preamble_samples):
t = i / self.sample_rate
value = math.sin(2 * math.pi * self.preamble_freq * t)
signal.append(value)
# Modulate symbols
for symbol in symbols:
freq = self.frequencies[symbol]
if HAS_NUMPY:
t = np.arange(self.samples_per_symbol) / self.sample_rate
tone = np.sin(2 * np.pi * freq * t)
signal.extend(tone)
else:
for i in range(self.samples_per_symbol):
t = i / self.sample_rate
value = math.sin(2 * math.pi * freq * t)
signal.append(value)
# Apply smoothing to reduce clicks
if HAS_NUMPY:
audio = np.array(signal, dtype=np.float32)
else:
audio = array.array('f', signal)
audio = self._apply_envelope(audio)
return audio
def demodulate(self, audio) -> Tuple[bytes, float]:
"""
Demodulate FSK audio signal to binary data.
Args:
audio: Audio signal
Returns:
Tuple of (demodulated data, confidence score)
"""
# Find preamble
preamble_start = self._find_preamble(audio)
if preamble_start < 0:
return b'', 0.0
# Skip preamble
data_start = preamble_start + int(self.preamble_duration * self.sample_rate)
# Demodulate symbols
symbols = []
confidence_scores = []
pos = data_start
while pos + self.samples_per_symbol <= len(audio):
symbol_audio = audio[pos:pos + self.samples_per_symbol]
symbol, confidence = self._demodulate_symbol(symbol_audio)
symbols.append(symbol)
confidence_scores.append(confidence)
pos += self.samples_per_symbol
# Convert symbols to bytes
data = bytearray()
for i in range(0, len(symbols), 4):
if i + 3 < len(symbols):
byte = (symbols[i] << 6) | (symbols[i+1] << 4) | (symbols[i+2] << 2) | symbols[i+3]
data.append(byte)
if HAS_NUMPY and confidence_scores:
avg_confidence = np.mean(confidence_scores)
else:
avg_confidence = sum(confidence_scores) / len(confidence_scores) if confidence_scores else 0.0
return bytes(data), avg_confidence
def _find_preamble(self, audio) -> int:
"""Find preamble in audio signal."""
# Simple energy-based detection
window_size = int(0.01 * self.sample_rate) # 10ms window
if HAS_NUMPY:
for i in range(0, len(audio) - window_size, window_size // 2):
window = audio[i:i + window_size]
# Check for preamble frequency
fft = np.fft.fft(window)
freqs = np.fft.fftfreq(len(window), 1/self.sample_rate)
# Find peak near preamble frequency
idx = np.argmax(np.abs(fft[:len(fft)//2]))
peak_freq = abs(freqs[idx])
if abs(peak_freq - self.preamble_freq) < 50: # 50 Hz tolerance
return i
else:
# Simple zero-crossing based detection without FFT
for i in range(0, len(audio) - window_size, window_size // 2):
window = list(audio[i:i + window_size])
# Count zero crossings
zero_crossings = 0
for j in range(1, len(window)):
if (window[j-1] >= 0) != (window[j] >= 0):
zero_crossings += 1
# Estimate frequency from zero crossings
estimated_freq = (zero_crossings * self.sample_rate) / (2 * len(window))
if abs(estimated_freq - self.preamble_freq) < 100: # 100 Hz tolerance
return i
return -1
def _demodulate_symbol(self, audio) -> Tuple[int, float]:
"""Demodulate a single FSK symbol."""
if HAS_NUMPY:
# FFT-based demodulation
fft = np.fft.fft(audio)
freqs = np.fft.fftfreq(len(audio), 1/self.sample_rate)
magnitude = np.abs(fft[:len(fft)//2])
# Find energy at each FSK frequency
energies = []
for freq in self.frequencies:
idx = np.argmin(np.abs(freqs[:len(freqs)//2] - freq))
energy = magnitude[idx]
energies.append(energy)
# Select symbol with highest energy
symbol = np.argmax(energies)
else:
# Goertzel algorithm for specific frequency detection
audio_list = list(audio) if hasattr(audio, '__iter__') else audio
energies = []
for freq in self.frequencies:
# Goertzel algorithm
omega = 2 * math.pi * freq / self.sample_rate
coeff = 2 * math.cos(omega)
s_prev = 0
s_prev2 = 0
for sample in audio_list:
s = sample + coeff * s_prev - s_prev2
s_prev2 = s_prev
s_prev = s
# Calculate magnitude
power = s_prev2 * s_prev2 + s_prev * s_prev - coeff * s_prev * s_prev2
energies.append(math.sqrt(abs(power)))
# Select symbol with highest energy
symbol = energies.index(max(energies))
# Confidence is ratio of strongest to second strongest
sorted_energies = sorted(energies, reverse=True)
confidence = sorted_energies[0] / (sorted_energies[1] + 1e-6)
return symbol, min(confidence, 10.0) / 10.0
def _apply_envelope(self, audio):
"""Apply smoothing envelope to reduce clicks."""
# Simple raised cosine envelope
ramp_samples = int(0.002 * self.sample_rate) # 2ms ramps
if len(audio) > 2 * ramp_samples:
if HAS_NUMPY:
# Fade in
t = np.linspace(0, np.pi/2, ramp_samples)
audio[:ramp_samples] *= np.sin(t) ** 2
# Fade out
audio[-ramp_samples:] *= np.sin(t[::-1]) ** 2
else:
# Manual fade in
for i in range(ramp_samples):
t = (i / ramp_samples) * (math.pi / 2)
factor = math.sin(t) ** 2
audio[i] *= factor
# Manual fade out
for i in range(ramp_samples):
t = ((ramp_samples - 1 - i) / ramp_samples) * (math.pi / 2)
factor = math.sin(t) ** 2
audio[-(i+1)] *= factor
return audio
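# Back-of-the-envelope check for the modem settings above: 4-FSK carries 2 bits per
# symbol, so the default 600 baud gives 1200 bit/s of raw capacity, which lines up with
# the Codec2 MODE_1200 bitrate before preamble and FEC overhead are added.
def _fsk_raw_bitrate(baud_rate: int = 600, bits_per_symbol: int = 2) -> int:
    return baud_rate * bits_per_symbol  # 600 * 2 = 1200 bps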
class VoiceProtocol:
"""
Integrates voice codec and modem with the Icing protocol
for encrypted voice transmission over GSM.
"""
def __init__(self, protocol_instance):
"""
Initialize voice protocol handler.
Args:
protocol_instance: IcingProtocol instance
"""
self.protocol = protocol_instance
self.codec = Codec2Wrapper(Codec2Mode.MODE_1200)
self.modem = FSKModem(sample_rate=8000, baud_rate=600)
# Voice crypto state
self.voice_iv_counter = 0
self.voice_sequence = 0
# Buffers
if HAS_NUMPY:
self.audio_buffer = np.array([], dtype=np.int16)
else:
self.audio_buffer = array.array('h') # 16-bit signed integers
self.frame_buffer = []
print(f"{GREEN}[VOICE]{RESET} Voice protocol initialized")
def process_voice_input(self, audio_samples):
"""
Process voice input: compress, encrypt, and modulate.
Args:
audio_samples: PCM audio samples (8kHz, 16-bit)
Returns:
Modulated audio signal ready for transmission (numpy array or array.array)
"""
# Add to buffer
if HAS_NUMPY:
self.audio_buffer = np.concatenate([self.audio_buffer, audio_samples])
else:
self.audio_buffer.extend(audio_samples)
# Process complete frames
modulated_audio = []
while len(self.audio_buffer) >= self.codec.frame_samples:
# Extract frame
if HAS_NUMPY:
frame_audio = self.audio_buffer[:self.codec.frame_samples]
self.audio_buffer = self.audio_buffer[self.codec.frame_samples:]
else:
frame_audio = array.array('h', self.audio_buffer[:self.codec.frame_samples])
del self.audio_buffer[:self.codec.frame_samples]
# Compress with Codec2
compressed_frame = self.codec.encode(frame_audio)
if not compressed_frame:
continue
# Encrypt frame
encrypted = self._encrypt_voice_frame(compressed_frame)
# Add FEC
protected = self._add_fec(encrypted)
# Modulate to audio
audio_signal = self.modem.modulate(protected, add_preamble=True)
modulated_audio.append(audio_signal)
if modulated_audio:
if HAS_NUMPY:
return np.concatenate(modulated_audio)
else:
# Concatenate array.array objects
result = array.array('f')
for audio in modulated_audio:
result.extend(audio)
return result
return None
def process_voice_output(self, modulated_audio):
"""
Process received audio: demodulate, decrypt, and decompress.
Args:
modulated_audio: Received FSK-modulated audio
Returns:
Decoded PCM audio samples (numpy array or array.array)
"""
# Demodulate
data, confidence = self.modem.demodulate(modulated_audio)
if confidence < 0.5:
print(f"{YELLOW}[VOICE]{RESET} Low demodulation confidence: {confidence:.2f}")
return None
# Remove FEC
frame_data = self._remove_fec(data)
if not frame_data:
return None
# Decrypt
compressed_frame = self._decrypt_voice_frame(frame_data)
if not compressed_frame:
return None
# Decompress
audio_samples = self.codec.decode(compressed_frame)
return audio_samples
def _encrypt_voice_frame(self, frame: Codec2Frame) -> bytes:
"""Encrypt a voice frame using ChaCha20-CTR."""
if not self.protocol.hkdf_key:
raise ValueError("No encryption key available")
# Prepare frame data
frame_data = struct.pack('<BIH',
frame.mode,
frame.frame_number,
len(frame.bits)
) + frame.bits
# Generate IV for this frame (ChaCha20 needs 16 bytes)
iv = struct.pack('<Q', self.voice_iv_counter) + b'\x00' * 8 # 8 + 8 = 16 bytes
# Encrypt using ChaCha20
from encryption import chacha20_encrypt
key = bytes.fromhex(self.protocol.hkdf_key)
encrypted = chacha20_encrypt(frame_data, key, iv)
# Prepend the sequence number and the counter value used for this frame's IV so the
# receiver can rebuild the same IV, then advance both counters for the next frame
header = struct.pack('<HQ', self.voice_sequence, self.voice_iv_counter)
self.voice_iv_counter += 1
self.voice_sequence = (self.voice_sequence + 1) & 0xFFFF
return header + encrypted
def _decrypt_voice_frame(self, data: bytes) -> Optional[Codec2Frame]:
"""Decrypt a voice frame."""
if len(data) < 10:
return None
# Extract sequence and IV hint
sequence, iv_hint = struct.unpack('<HQ', data[:10])
encrypted = data[10:]
# Generate IV (16 bytes for ChaCha20)
iv = struct.pack('<Q', iv_hint) + b'\x00' * 8
# Decrypt
from encryption import chacha20_decrypt
key = bytes.fromhex(self.protocol.hkdf_key)
try:
decrypted = chacha20_decrypt(encrypted, key, iv)
# Parse frame
mode, frame_num, bits_len = struct.unpack('<BIH', decrypted[:7])
bits = decrypted[7:7+bits_len]
return Codec2Frame(
mode=Codec2Mode(mode),
bits=bits,
timestamp=0, # Will be set by caller
frame_number=frame_num
)
except Exception as e:
print(f"{RED}[VOICE]{RESET} Decryption failed: {e}")
return None
def _add_fec(self, data: bytes) -> bytes:
"""Add forward error correction."""
# Simple repetition code (3x) for testing
# In production: use convolutional code or LDPC
fec_data = bytearray()
for byte in data:
# Repeat each byte 3 times
fec_data.extend([byte, byte, byte])
return bytes(fec_data)
def _remove_fec(self, data: bytes) -> Optional[bytes]:
"""Remove FEC and correct errors."""
if len(data) % 3 != 0:
return None
corrected = bytearray()
for i in range(0, len(data), 3):
# Majority voting
votes = [data[i], data[i+1], data[i+2]]
byte_value = max(set(votes), key=votes.count)
corrected.append(byte_value)
return bytes(corrected)
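# Illustration of the repetition FEC used above: each byte is transmitted three times
# and the receiver takes a majority vote, so one corrupted copy per byte is recovered.
def _demo_repetition_fec():
    protected = bytearray(b"\x42" * 3)   # what _add_fec emits for a single 0x42 byte
    protected[1] ^= 0xFF                 # corrupt one of the three copies
    votes = list(protected)
    assert max(set(votes), key=votes.count) == 0x42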
# Example usage
if __name__ == "__main__":
# Test Codec2 wrapper
print(f"\n{BLUE}=== Testing Codec2 Wrapper ==={RESET}")
codec = Codec2Wrapper(Codec2Mode.MODE_1200)
# Generate test audio
if HAS_NUMPY:
t = np.linspace(0, 0.04, 320) # 40ms at 8kHz
test_audio = (np.sin(2 * np.pi * 440 * t) * 16384).astype(np.int16)
else:
test_audio = array.array('h')
for i in range(320):
t = i * 0.04 / 320
value = int(math.sin(2 * math.pi * 440 * t) * 16384)
test_audio.append(value)
# Encode
frame = codec.encode(test_audio)
print(f"Encoded frame: {len(frame.bits)} bytes")
# Decode
decoded = codec.decode(frame)
print(f"Decoded audio: {len(decoded)} samples")
# Test FSK modem
print(f"\n{BLUE}=== Testing FSK Modem ==={RESET}")
modem = FSKModem()
# Test data
test_data = b"Hello, secure voice!"
# Modulate
modulated = modem.modulate(test_data)
print(f"Modulated: {len(modulated)} samples ({len(modulated)/8000:.2f}s)")
# Demodulate
demodulated, confidence = modem.demodulate(modulated)
print(f"Demodulated: {demodulated}")
print(f"Confidence: {confidence:.2%}")
print(f"Match: {demodulated == test_data}")