Starting Spark integration
This commit is contained in:
parent
774bce8ec7
commit
29e6749316
45
Private/Spark.ps1
Normal file
45
Private/Spark.ps1
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
# Default HTTP headers sent with every Spark (Ivanti ISM) REST request.
# Authorization starts empty; Connect-ISM fills it in interactively.
$SparkHeaders = @{
    "Content-Type"    = "application/json"
    "Authorization"   = ""
    "Accept"          = "*/*"
    "Accept-Encoding" = "gzip, deflate, br"
}

# Tenant-specific base URL for the Ivanti Cloud instance.
$SparkTenantPrefix = "samaritanhealth-amc"
$SparkURL = "https://$SparkTenantPrefix.ivanticloud.com"
|
||||
|
||||
Function Connect-ISM {
    <#
    .SYNOPSIS
        Verifies, and if necessary refreshes, the Authorization header used for Spark (Ivanti ISM) requests.
    .DESCRIPTION
        Probes the OData endpoint to test whether $SparkHeaders["Authorization"] currently works.
        ISM_4004 (not found) proves we authenticated, so it is treated as success.
        ISM_4001 (unauthorized) prompts the user for a new SID cookie or REST API key.
        Any other error clears the Authorization header and reports the failure.
    .OUTPUTS
        None. Mutates $SparkHeaders["Authorization"] as a side effect.
    #>
    try {
        # Fast probe: a bare OData call tells us whether the current headers authorize.
        Invoke-RestMethod -Method Get -URI "$SparkURL/api/odata" -Headers $SparkHeaders
    } catch {
        $errobject = ConvertFrom-Json $_
        # A 404 means we were authorized and didn't find anything, as intended!
        if ($errobject.code -eq "ISM_4004") { return }
        # Anything other than a 401 Unauthorized is unexpected; clear the key and bail out.
        if ($errobject.code -ne "ISM_4001") {
            Write-Host -ForegroundColor Red "Unexpected error connecting to Spark!"
            Write-Host -ForegroundColor Red "$errobject"
            $SparkHeaders["Authorization"] = ""
            return
        }
        # Unauthorized response, so let's update our authorization!
        if ( $SparkHeaders["Authorization"] ) {
            Write-Host "Spark Authorization key expired, please update key"
        }
        $authKey = Read-Host "Login to Spark, open browser dev tools, and paste SID cookie here, or an API key if you have one"
        # BUG FIX: original pattern "[0-9A-F](32)" used a literal group "(32)", matching any
        # string containing a hex digit followed by "32". "{32}" is the intended quantifier
        # for a 32-hex-character API key (-match is case-insensitive, so lowercase hex passes too).
        if ($authKey -match "[0-9A-F]{32}") {
            $SparkHeaders["Authorization"] = "rest_api_key=$authKey"
        } elseif ($authKey -match "$($SparkURL.split('/')[-1])#.*#") {
            # Presumably the SID cookie embeds the tenant hostname followed by '#...#'
            # segments — TODO confirm against an actual cookie value.
            $SparkHeaders["Authorization"] = $authKey
        } else {
            Write-Host -ForegroundColor Yellow "Authorization key not a recognized key format"
            $SparkHeaders["Authorization"] = ""
        }
    }
}
|
||||
|
||||
Function Check-SparkEnabled {
    # True when an Authorization value has been set (i.e. Connect-ISM succeeded).
    # The [bool] cast reproduces the original double-negation truthiness check:
    # empty string -> $false, any non-empty key -> $true.
    return [bool]$SparkHeaders["Authorization"]
}
|
||||
|
||||
Loading…
Reference in a new issue