diff --git a/.gitignore b/.gitignore index 7e7a7bc1d5f..1fe5711df9d 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ bin /.idea/ /.vscode/ +file:/ /resources /vendor /site diff --git a/assets/assets.go b/assets/assets.go index 396ed6285f3..762acb7c5a2 100644 --- a/assets/assets.go +++ b/assets/assets.go @@ -2,13 +2,17 @@ package assets import "embed" // used for embedding KICS libraries - //go:embed libraries/*.rego var embeddedLibraries embed.FS +//go:embed queries/common/passwords_and_secrets/metadata.json +var SecretsQueryMetadataJSON string + +//go:embed queries/common/passwords_and_secrets/regex_rules.json +var SecretsQueryRegexRulesJSON string // GetEmbeddedLibrary returns the embedded library.rego for the platform passed in the argument -func GetEmbeddedLibrary(platform string) (string, error){ +func GetEmbeddedLibrary(platform string) (string, error) { content, err := embeddedLibraries.ReadFile("libraries/" + platform + ".rego") return string(content), err diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/metadata.json b/assets/queries/common/passwords_and_secrets/metadata.json similarity index 70% rename from assets/queries/common/passwords_and_secrets_in_infrastructure_code/metadata.json rename to assets/queries/common/passwords_and_secrets/metadata.json index fd1c63c80fe..fdb12f2c163 100644 --- a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/metadata.json +++ b/assets/queries/common/passwords_and_secrets/metadata.json @@ -1,6 +1,5 @@ { - "id": "f996f3cb-00fc-480c-8973-8ab04d44a8cc", - "queryName": "Passwords And Secrets In Infrastructure Code", + "queryName": "Passwords And Secrets", "severity": "HIGH", "category": "Secret Management", "descriptionText": "Query to find passwords and secrets in infrastructure code.", diff --git a/assets/queries/common/passwords_and_secrets/regex_rules.json b/assets/queries/common/passwords_and_secrets/regex_rules.json new file mode 100644 index 
00000000000..949d3acc6b9 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/regex_rules.json @@ -0,0 +1,269 @@ +{ + "rules": [ + { + "id": "487f4be7-3fd9-4506-a07a-eae252180c08", + "name": "Generic Password", + "regex": "(?i)['\"]?password['\"]?\\s*[:=]\\s*['\"]?([A-Za-z0-9/~^_!@&%()=?*+-.]{4,})['\"]?", + "allowRules": [ + { + "description": "Avoiding TF resource access", + "regex": "(?i)['\"]?password['\"]?\\s*=\\s*([a-zA-z_]+(.))?[a-zA-z_]+(.)[a-zA-z_]+(.)[a-zA-z_]+" + } + ] + }, + { + "id": "3e2d3b2f-c22a-4df1-9cc6-a7a0aebb0c99", + "name": "Generic Secret", + "regex": "(?i)['\"]?secret[_]?(key)?['\"]?\\s*(:|=)\\s*['\"]?([A-Za-z0-9/~^_!@&%()=?*+-]{10,})['\"]?", + "entropies": [ + { + "group": 3, + "min": 2.8, + "max": 8 + } + ], + "allowRules": [ + { + "description": "Avoiding Square OAuth Secret", + "regex": "(?i)['\"]?secret[_]?(key)?['\"]?\\s*(:|=)\\s*['\"]?(sq0csp-[0-9A-Za-z\\-_]{43})['\"]?" + } + ] + }, + { + "id": "51b5b840-cd0c-4556-98a7-fe5f4def80cf", + "name": "Asymmetric private key", + "regex": "-----BEGIN ((EC|PGP|DSA|RSA|OPENSSH) )?PRIVATE KEY( BLOCK)?-----(\\s*([A-Za-z0-9+\\/=\\n\\r]+))+-----END ((EC|PGP|DSA|RSA|OPENSSH) )?PRIVATE KEY( BLOCK)?-----", + "multiline": { + "detectLineGroup": 5 + }, + "entropies": [ + { + "group": 5, + "min": 3.7, + "max": 12 + } + ] + }, + { + "id": "a007a85e-a2a7-4a81-803a-7a2ca0c65abb", + "name": "Putty Private Key", + "regex": "PuTTY-User-Key-File-2" + }, + { + "id": "c4d3b58a-e6d4-450f-9340-04f1e702eaae", + "name": "Password in URL", + "regex": "[a-zA-Z]{3,10}://[^/\\s:@]*?:[^/\\s:@]*?@[^/\\s:@]*" + }, + { + "id": "76c0bcde-903d-456e-ac13-e58c34987852", + "name": "AWS Access Key", + "regex": "(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}" + }, + { + "id": "83ab47ff-381d-48cd-bac5-fb32222f54af", + "name": "AWS Secret Key", + "regex": "(?i)AWS_SECRET(_ACCESS)?(_KEY)?\\s*[:=]\\s*['\"]?([a-zA-Z0-9/]{40})[\"']?", + "entropies": [ + { + "group": 3, + "min": 4.8, + "max": 7 + } + ] + }, + { 
+ "id": "4b2b5fd3-364d-4093-bac2-17391b2a5297", + "name": "K8s Environment Variable Password", + "regex": "apiVersion((.*)\\s*)*env:((.*)\\s*)*name:\\s*\\w+(?i)pass((?i)word)?\\w*\\s*(value):\\s*([\"|'].*[\"|'])", + "multiline": { + "detectLineGroup": 7 + } + }, + { + "id": "d651cca2-2156-4d17-8e76-423e68de5c8b", + "name": "Google OAuth", + "regex": "[0-9]+-[0-9A-Za-z_]{32}\\.apps\\.googleusercontent\\.com" + }, + { + "id": "ccde326f-ebc7-4772-8ad5-de66e90a8cc3", + "name": "Slack Webhook", + "regex": "https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}" + }, + { + "id": "d6214dca-a31b-425f-bcf7-f4faa772a1c0", + "name": "MSTeams Webhook", + "regex": "https://team_name.webhook.office.com/webhook(b2)?/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}@[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/IncomingWebhook/[a-z0-9]+/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + }, + { + "id": "7908a9e3-5cac-41b1-b514-5f6d82ce02d5", + "name": "Slack Token", + "regex": "(xox[p|b|o|a]-[0-9]{12}-[0-9]{12}-[0-9]{12}-[a-z0-9]{32})" + }, + { + "id": "6abcae17-b175-4698-a9a5-b07661974749", + "name": "Stripe API Key", + "regex": "sk_live_[0-9a-zA-Z]{24}[^0-9a-zA-Z]" + }, + { + "id": "0b1b2482-51e7-49d1-893d-522afa4a6bd0", + "name": "Square Access Token", + "regex": "sq0atp-[0-9A-Za-z\\-_]{22}" + }, + { + "id": "6c54f9da-1a11-445a-8568-0d327e6af8be", + "name": "MailChimp API Key", + "regex": "[0-9a-f]{32}-us[0-9]{1,2}" + }, + { + "id": "e9856348-4069-4ac0-bd91-415f6a7b84a4", + "name": "Google API Key", + "regex": "AIza[0-9A-Za-z\\-_]{35}" + }, + { + "id": "9a3650af-5b88-48cd-ab89-cd77fd0b633f", + "name": "Heroku API Key", + "regex": "(?i)heroku((.|\\n)*)\\b([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})\\b", + "multiline": { + "detectLineGroup": 3 + } + }, + { + "id": "bb51eb1e-0357-44a2-86d7-dd5350cffd43", + "name": "Square OAuth Secret", + "regex": "sq0csp-[0-9A-Za-z\\-_]{43}" + }, + { + "id": 
"ac8c8075-6ec0-4367-9e26-30ec8161d258", + "name": "Amazon MWS Auth Token", + "regex": "amzn\\.mws\\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + }, + { + "id": "41a1ca8d-f466-4084-a8c9-50f8b22200d5", + "name": "Google OAuth Access Token", + "regex": "ya29\\.[0-9A-Za-z\\-_]+" + }, + { + "id": "4919b847-e3da-402a-acf8-6cea8e529993", + "name": "PayPal Braintree Access Token", + "regex": "access_token\\$production\\$[0-9a-z]{16}\\$[0-9a-f]{32}" + }, + { + "id": "54274b18-bfac-47ce-afd1-0f05bc3e3b59", + "name": "Stripe Restricted API Key", + "regex": "rk_live_[0-9a-zA-Z]{24}" + }, + { + "id": "5176e805-0cda-44fa-ac96-c092c646180a", + "name": "Facebook Access Token", + "regex": "EAACEdEose0cBA[0-9A-Za-z]+" + }, + { + "id": "74736dd1-dd11-4139-beb6-41cd43a50317", + "name": "Generic API Key", + "regex": "(?i)['\"]?api[_]?key['\"]?\\s*[:=]\\s*['\"]?([0-9a-zA-Z]{32,45})['\"]?", + "allowRules": [ + { + "description": "Avoiding Twilio API Key", + "regex": "(?i)['\"]?api[_]?key['\"]?\\s*[:=]\\s*['\"]?(SK[0-9a-fA-F]{32})['\"]?" + } + ] + }, + { + "id": "62d0025d-9575-4eff-b60b-d3b4fcec0d04", + "name": "Mailgun API Key", + "regex": "key-[0-9a-zA-Z]{32}" + }, + { + "id": "50cc5f03-e686-4183-97e9-12f9b55d0f97", + "name": "Picatic API Key", + "regex": "sk_live_[0-9a-z]{32}" + }, + { + "id": "e0f01838-b1c2-4669-b84b-981949ebe5ed", + "name": "Twilio API Key", + "regex": "SK[0-9a-fA-F]{32}" + }, + { + "id": "2f665079-c383-4b33-896e-88268c1fa258", + "name": "Generic Private Key", + "regex": "(?i)['\"]?private[_]?key['\"]?\\s*[:=]\\s*['\"]?([[A-Za-z0-9/~^_!@&%()=?*+-]+)['\"]?" 
+ }, + { + "id": "baee238e-1921-4801-9c3f-79ae1d7b2cbc", + "name": "Generic Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[:=]\\s*['\"]?([[A-Za-z0-9/~^_!@&%()=?*+-]+)['\"]?", + "allowRules": [ + { + "description": "Avoiding Amazon MWS Auth Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(amzn\\.mws\\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})['\"]?" + }, + { + "description": "Avoiding Slack Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(xox[p|b|o|a]-[0-9]{12}-[0-9]{12}-[0-9]{12}-[a-z0-9]{32})['\"]?" + }, + { + "description": "Avoiding Square Access Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(sq0atp-[0-9A-Za-z\\-_]{22})['\"]?" + }, + { + "description": "Avoiding Google OAuth Access Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(ya29\\.[0-9A-Za-z\\-_]+)['\"]?" + }, + { + "description": "Avoiding PayPal Braintree Access Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(access_token\\$production\\$[0-9a-z]{16}\\$[0-9a-f]{32})['\"]?" + }, + { + "description": "Avoiding Facebook Access Token", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*[=:]\\s*['\"]?(EAACEdEose0cBA[0-9A-Za-z]+)['\"]?" + }, + { + "description": "Avoiding TF resource access", + "regex": "(?i)['\"]?token(_)?(key)?['\"]?\\s*=\\s*([a-zA-z_]+(.))?[a-zA-z_]+(.)[a-zA-z_]+(.)[a-zA-z_]+" + }, + { + "description": "Avoiding TF creation token", + "regex": "(?i)['\"]?creation_token['\"]?\\s*[:=]\\s*['\"]?([[A-Za-z0-9/~^_!@&%()=?*+-]+)['\"]?" 
+ } + ] + }, + { + "id": "d1c5f6aa-86cb-49a4-a9ab-f3b2e2a51f1a", + "name": "CloudFormation Secret Template", + "regex": "(?i)['\"]?SecretStringTemplate['\"]?\\s*:\\s*['\"]?{([\\\":A-Za-z0-9/~^_!@&%()=?*+-]{10,})}" + }, + { + "id": "9fb1cd65-7a07-4531-9bcf-47589d0f82d6", + "name": "Encryption Key", + "regex": "(?i)['\"]?encryption[_]?key['\"]?\\s*[:=]\\s*['\"]?([[A-Za-z0-9/~^_!@&%()=?*+-]+)['\"]?", + "allowRules": [ + { + "description": "Avoiding TF resource access", + "regex": "(?i)['\"]?encryption[_]?key['\"]?\\s*=\\s*([a-zA-z_]+(.))?[a-zA-z_]+(.)[a-zA-z_]+(.)[a-zA-z_]+" + } + ] + } + ], + "allowRules": [ + { + "description": "Avoiding TF variables", + "regex": "(?i)['\"]?[a-zA-Z_]+['\"]?\\s*=\\s*['\"]?(var.)['\"]?" + }, + { + "description": "!Ref is a cloudFormation reference", + "regex": "(?i)['\"]?[a-zA-Z_]+['\"]?\\s*:\\s+!Ref\\s+\\.*" + }, + { + "description": "Avoiding cloudFormation intrinsic functions", + "regex": "(?i)['\"]?[a-zA-Z_]+['\"]?\\s*:\\s+(!GetAtt|!Sub|!FindInMap|!If|!GetAZs|!ImportValue|!Join|!Select|!Split|Fn::Transform(:)?)\\s+\\.*" + }, + { + "description": "Avoiding CF resolve", + "regex": "(?i)['\"]?[a-zA-Z_]+['\"]?\\s*[=:]\\s*['\"]?({{resolve:)['\"]?" + }, + { + "description": "Avoiding Boolean's", + "regex": "(?i)['\"]?[a-zA-Z_]+['\"]?\\s*[=:]\\s*['\"]?(true|false)['\"]?" 
+ } + ] +} diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/negative1.yaml b/assets/queries/common/passwords_and_secrets/test/negative1.yaml similarity index 100% rename from assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/negative1.yaml rename to assets/queries/common/passwords_and_secrets/test/negative1.yaml diff --git a/assets/queries/common/passwords_and_secrets/test/negative10.tf b/assets/queries/common/passwords_and_secrets/test/negative10.tf new file mode 100644 index 00000000000..68adb907cd5 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/negative10.tf @@ -0,0 +1,30 @@ +resource "aws_db_instance" "default" { + name = var.dbname + engine = "mysql" + option_group_name = aws_db_option_group.default.name + parameter_group_name = aws_db_parameter_group.default.name + db_subnet_group_name = aws_db_subnet_group.default.name + vpc_security_group_ids = ["${aws_security_group.default.id}"] + identifier = "rds-${local.resource_prefix.value}" + engine_version = "8.0" # Latest major version + instance_class = "db.t3.micro" + allocated_storage = "20" + username = "admin" + password = var.password + apply_immediately = true + multi_az = false + backup_retention_period = 0 + storage_encrypted = false + skip_final_snapshot = true + monitoring_interval = 0 + publicly_accessible = true + tags = { + Name = "${local.resource_prefix.value}-rds" + Environment = local.resource_prefix.value + } + + # Ignore password changes from tf plan diff + lifecycle { + ignore_changes = ["password"] + } +} diff --git a/assets/queries/common/passwords_and_secrets/test/negative18.tf b/assets/queries/common/passwords_and_secrets/test/negative18.tf new file mode 100644 index 00000000000..91111c3c83d --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/negative18.tf @@ -0,0 +1,11 @@ +resource "auth0_connection" "google_oauth2" { + name = "Google-OAuth2-Connection" + strategy = "google-oauth2" + options { 
+ client_id = var.google_client_id + client_secret = var.google_client_secret + allowed_audiences = [ "example.com", "api.example.com" ] + scopes = [ "email", "profile", "gmail", "youtube" ] + set_user_root_attributes = "on_each_login" + } +} diff --git a/assets/queries/common/passwords_and_secrets/test/negative19.tf b/assets/queries/common/passwords_and_secrets/test/negative19.tf new file mode 100644 index 00000000000..234d9f04a5e --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/negative19.tf @@ -0,0 +1,3 @@ +provider "slack" { + token = var.slack_token +} diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/negative2.yaml b/assets/queries/common/passwords_and_secrets/test/negative2.yaml similarity index 88% rename from assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/negative2.yaml rename to assets/queries/common/passwords_and_secrets/test/negative2.yaml index 8ad5262376e..f7d63e9ec52 100644 --- a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/negative2.yaml +++ b/assets/queries/common/passwords_and_secrets/test/negative2.yaml @@ -3,6 +3,7 @@ Resources: RDSCluster: Type: "AWS::RDS::DBCluster" Properties: + MasterUserPassword: !Ref PasswordMaster DBClusterIdentifier: my-serverless-cluster Engine: aurora EngineVersion: 5.6.10a diff --git a/assets/queries/common/passwords_and_secrets/test/negative20.tf b/assets/queries/common/passwords_and_secrets/test/negative20.tf new file mode 100644 index 00000000000..087185f62e0 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/negative20.tf @@ -0,0 +1,3 @@ +provider "stripe" { + api_key = var.strip_api_key +} diff --git a/assets/queries/common/passwords_and_secrets/test/negative21.tf b/assets/queries/common/passwords_and_secrets/test/negative21.tf new file mode 100644 index 00000000000..4f13644463a --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/negative21.tf @@ -0,0 +1,85 @@ +resource 
"aws_ecs_task_definition" "webapp" { + family = "tomato-webapp" + task_role_arn = data.aws_iam_role.ecs_task_role.arn + + container_definitions = <Deployed via Terraform" | sudo tee /var/www/html/index.html +EOF + tags = merge({ + Name = "${local.resource_prefix.value}-ec2" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "347af3cd-4f70-4632-aca3-4d5e30ffc0b6" + }) +} + +resource "aws_ebs_volume" "web_host_storage" { + # unencrypted volume + availability_zone = "${var.region}a" + #encrypted = false # Setting this causes the volume to be recreated on apply + size = 1 + tags = merge({ + Name = "${local.resource_prefix.value}-ebs" + }, { + git_commit = "6e62522d2ab8f63740e53752b84a6e99cd65696a" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2021-05-02 11:16:31" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "c5509daf-10f0-46af-9e03-41989212521d" + }) +} + +resource "aws_ebs_snapshot" "example_snapshot" { + # ebs snapshot without encryption + volume_id = "${aws_ebs_volume.web_host_storage.id}" + description = "${local.resource_prefix.value}-ebs-snapshot" + tags = merge({ + Name = "${local.resource_prefix.value}-ebs-snapshot" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "c1008080-ec2f-4512-a0d0-2e9330aa58f0" + }) +} + +resource "aws_volume_attachment" "ebs_att" { + device_name = "/dev/sdh" + volume_id = "${aws_ebs_volume.web_host_storage.id}" + instance_id = 
"${aws_instance.web_host.id}" +} + +resource "aws_security_group" "web-node" { + # security group is open to the world in SSH port + name = "${local.resource_prefix.value}-sg" + description = "${local.resource_prefix.value} Security Group" + vpc_id = aws_vpc.web_vpc.id + + ingress { + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = [ + "0.0.0.0/0"] + } + ingress { + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = [ + "0.0.0.0/0"] + } + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = [ + "0.0.0.0/0"] + } + depends_on = [aws_vpc.web_vpc] + tags = { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "b7af1b40-64eb-4519-a1a0-ab198db4b193" + } +} + +resource "aws_vpc" "web_vpc" { + cidr_block = "172.16.0.0/16" + enable_dns_hostnames = true + enable_dns_support = true + tags = merge({ + Name = "${local.resource_prefix.value}-vpc" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "9bf2359b-952e-4570-9595-52eba4c20473" + }) +} + +resource "aws_subnet" "web_subnet" { + vpc_id = aws_vpc.web_vpc.id + cidr_block = "172.16.10.0/24" + availability_zone = "${var.region}a" + map_public_ip_on_launch = true + + tags = merge({ + Name = "${local.resource_prefix.value}-subnet" + }, { + git_commit = "6e62522d2ab8f63740e53752b84a6e99cd65696a" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2021-05-02 11:16:31" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = 
"0345f650-d280-4ca8-86c9-c71c38c0eda8" + }) +} + +resource "aws_subnet" "web_subnet2" { + vpc_id = aws_vpc.web_vpc.id + cidr_block = "172.16.11.0/24" + availability_zone = "${var.region}b" + map_public_ip_on_launch = true + + tags = merge({ + Name = "${local.resource_prefix.value}-subnet2" + }, { + git_commit = "6e62522d2ab8f63740e53752b84a6e99cd65696a" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2021-05-02 11:16:31" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "224af03a-00e0-4981-be30-14965833c2db" + }) +} + + +resource "aws_internet_gateway" "web_igw" { + vpc_id = aws_vpc.web_vpc.id + + tags = merge({ + Name = "${local.resource_prefix.value}-igw" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "d8e63cb4-2fb5-4726-9c86-5fd05ef03674" + }) +} + +resource "aws_route_table" "web_rtb" { + vpc_id = aws_vpc.web_vpc.id + + tags = merge({ + Name = "${local.resource_prefix.value}-rtb" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "5e4fee6e-a6aa-4b61-a741-47c5efb463e1" + }) +} + +resource "aws_route_table_association" "rtbassoc" { + subnet_id = aws_subnet.web_subnet.id + route_table_id = aws_route_table.web_rtb.id +} + +resource "aws_route_table_association" "rtbassoc2" { + subnet_id = aws_subnet.web_subnet2.id + route_table_id = aws_route_table.web_rtb.id +} + +resource "aws_route" "public_internet_gateway" { + route_table_id = aws_route_table.web_rtb.id + destination_cidr_block = 
"0.0.0.0/0" + gateway_id = aws_internet_gateway.web_igw.id + + timeouts { + create = "5m" + } +} + + +resource "aws_network_interface" "web-eni" { + subnet_id = aws_subnet.web_subnet.id + private_ips = ["172.16.10.100"] + + tags = merge({ + Name = "${local.resource_prefix.value}-primary_network_interface" + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "7e2ffea8-739f-467d-b57b-53cbc0d7ccbe" + }) +} + +# VPC Flow Logs to S3 +resource "aws_flow_log" "vpcflowlogs" { + log_destination = aws_s3_bucket.flowbucket.arn + log_destination_type = "s3" + traffic_type = "ALL" + vpc_id = aws_vpc.web_vpc.id + + tags = merge({ + Name = "${local.resource_prefix.value}-flowlogs" + Environment = local.resource_prefix.value + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "6808d4b7-45bc-4d1d-9523-96757a3add3a" + }) +} + +resource "aws_s3_bucket" "flowbucket" { + bucket = "${local.resource_prefix.value}-flowlogs" + force_destroy = true + + tags = merge({ + Name = "${local.resource_prefix.value}-flowlogs" + Environment = local.resource_prefix.value + }, { + git_commit = "d68d2897add9bc2203a5ed0632a5cdd8ff8cefb0" + git_file = "terraform/aws/ec2.tf" + git_last_modified_at = "2020-06-16 14:46:24" + git_last_modified_by = "nimrodkor@gmail.com" + git_modifiers = "nimrodkor" + git_org = "bridgecrewio" + git_repo = "terragoat" + yor_trace = "f058838a-b1e0-4383-b965-7e06e987ffb1" + }) +} + +output "ec2_public_dns" { + description = "Web Host Public DNS name" + value = aws_instance.web_host.public_dns +} + +output 
"vpc_id" { + description = "The ID of the VPC" + value = aws_vpc.web_vpc.id +} + +output "public_subnet" { + description = "The ID of the Public subnet" + value = aws_subnet.web_subnet.id +} + +output "public_subnet2" { + description = "The ID of the Public subnet" + value = aws_subnet.web_subnet2.id +} diff --git a/assets/queries/common/passwords_and_secrets/test/positive16.yaml b/assets/queries/common/passwords_and_secrets/test/positive16.yaml new file mode 100644 index 00000000000..c4d05df9e80 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/positive16.yaml @@ -0,0 +1,38 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: x +spec: + replicas: 5 + selector: + matchLabels: + app: x + template: + metadata: + labels: + app: x + spec: + containers: + - name: x + image: x + ports: + - containerPort: 5432 + env: + - name: PORT + value: "1234" + - name: DB_HOST + value: "127.0.0.1" + - name: DB_PORT + value: "23" + - name: DB_PORT_BD + value: "5432" + - name: DB_HOST_BD + value: "127.0.0.1" + - name: DB_NAME_BD + value: "dbx" + - name: DB_PASS_BD + value: "passx" + - name: DB_PASS_BD_2 + value: "passx" + - name: DB_USER_BD + value: "userx" diff --git a/assets/queries/common/passwords_and_secrets/test/positive17.tf b/assets/queries/common/passwords_and_secrets/test/positive17.tf new file mode 100644 index 00000000000..e3e0e4988ec --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/positive17.tf @@ -0,0 +1,13 @@ +resource "azurerm_sql_server" "example" { + name = "terragoat-sqlserver-${var.environment}${random_integer.rnd_int.result}" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + version = "12.0" + administrator_login = "ariel" + administrator_login_password = "Aa12345678" + + tags = { + environment = var.environment + terragoat = "true" + } +} diff --git a/assets/queries/common/passwords_and_secrets/test/positive18.tf 
b/assets/queries/common/passwords_and_secrets/test/positive18.tf new file mode 100644 index 00000000000..ce620adbfa5 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/positive18.tf @@ -0,0 +1,11 @@ +resource "auth0_connection" "google_oauth2" { + name = "Google-OAuth2-Connection" + strategy = "google-oauth2" + options { + client_id = "53221331-2323wasdfa343rwhthfaf33feaf2fa7f.apps.googleusercontent.com" + client_secret = "j2323232324" + allowed_audiences = [ "example.com", "api.example.com" ] + scopes = [ "email", "profile", "gmail", "youtube" ] + set_user_root_attributes = "on_each_login" + } +} diff --git a/assets/queries/common/passwords_and_secrets/test/positive19.tf b/assets/queries/common/passwords_and_secrets/test/positive19.tf new file mode 100644 index 00000000000..510fd75e614 --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/positive19.tf @@ -0,0 +1,3 @@ +provider "slack" { + token = "xoxp-121314151623-121314151623-121314151623-12131423121314151623121314151623" +} diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive2.yaml b/assets/queries/common/passwords_and_secrets/test/positive2.yaml similarity index 100% rename from assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive2.yaml rename to assets/queries/common/passwords_and_secrets/test/positive2.yaml diff --git a/assets/queries/common/passwords_and_secrets/test/positive20.tf b/assets/queries/common/passwords_and_secrets/test/positive20.tf new file mode 100644 index 00000000000..ef5f929113a --- /dev/null +++ b/assets/queries/common/passwords_and_secrets/test/positive20.tf @@ -0,0 +1,3 @@ +provider "stripe" { + api_key = "sk_live_aSaDsEaSaDsEaSaDs29SaDsE" +} diff --git a/assets/queries/common/passwords_and_secrets/test/positive21.tf b/assets/queries/common/passwords_and_secrets/test/positive21.tf new file mode 100644 index 00000000000..c23eb1a2469 --- /dev/null +++ 
b/assets/queries/common/passwords_and_secrets/test/positive21.tf @@ -0,0 +1,85 @@ +resource "aws_ecs_task_definition" "webapp" { + family = "tomato-webapp" + task_role_arn = data.aws_iam_role.ecs_task_role.arn + + container_definitions = < 1 - k := splitted[0] - is_string(k) - v := concat("", array.slice(splitted, 1, count(splitted))) - obj := { - "key": k, - "value": v, - "id": original, - } -} - -is_under_password_key(p) { - ar = {"pas", "psw", "pwd"} - contains(lower(p), ar[_]) -} - -is_under_secret_key(p) = res { - ar = {"secret", "encrypt", "credential"} - res := contains(lower(p), ar[_]) -} - -#search for default passwords -check_vulnerability(correctStrings) { - isDefaultPassword(correctStrings.value) - is_under_password_key(correctStrings.key) - - #remove common key and values - check_common(correctStrings) -} - -#search for non-default passwords under known names -check_vulnerability(correctStrings) { - #remove short strings - count(correctStrings.value) > 4 - count(correctStrings.value) < 30 - - #password should contain alpha and numeric and not contain spaces - count(regex.find_n("[a-zA-Z0-9]+", correctStrings.value, -1)) > 0 - count(regex.find_n("^[^{{]+$", correctStrings.value, -1)) > 0 - is_under_password_key(correctStrings.key) - - #remove common key and values - check_common(correctStrings) -} - -#search for non-default passwords with upper, lower chars and digits -check_vulnerability(correctStrings) { #ignore ascii cases - #remove short strings - count(correctStrings.value) > 6 - count(correctStrings.value) < 20 - - #password should contain alpha and numeric and not contain spaces or underscores - count(regex.find_n("[a-z]+", correctStrings.value, -1)) > 0 - count(regex.find_n("[A-Z]+", correctStrings.value, -1)) > 0 - count(regex.find_n("[0-9]+", correctStrings.value, -1)) > 0 - count(regex.find_n("^[^\\s_]+", correctStrings.value, -1)) > 0 - - #remove common key and values - check_common(correctStrings) -} - -#search for harcoded secrets with 
known prefixes -check_vulnerability(correctStrings) { - #look for a known prefix - contains(correctStrings.value, "PRIVATE KEY") - - #remove common key and values - check_common(correctStrings) -} - -#search for harcoded secret keys under known names -check_vulnerability(correctStrings) { - #remove short strings - count(correctStrings.value) > 8 - - #remove string with non-keys characters - count(regex.find_n("^[^\\s$]+$", correctStrings.value, -1)) > 0 - - #look for a known names - is_under_secret_key(correctStrings.key) - - #remove common key and values - check_common(correctStrings) -} - -#search for harcoded secrets by looking for their values with a special chars and length -check_vulnerability(correctStrings) { - #remove short strings - count(correctStrings.value) > 30 - - #remove string with non-keys characters - count(regex.find_n("^[^\\s/:@,.-_|]+$", correctStrings.value, -1)) > 0 - - #remove common key and values - check_common(correctStrings) -} - -check_common(correctStrings) { - #remove common values - not isCommonValue(correctStrings.value) - - #remove common keys - not commonLib.isCommonKey(correctStrings.key) -} - -#replace unicode values to avoid false positives -replace_unicode(allValues) = treatedValue { - treatedValue_first := replace(allValues, "\\u003c", "<") - treatedValue = replace(treatedValue_first, "\\u003e", ">") -} - -isDefaultPassword(p) { - data.defaultPasswords[_] == p -} - -isCommonValue(p) { - contains(upper(p), data.blackList[_]) -} diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive4.tf b/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive4.tf deleted file mode 100644 index 5d0066f90c6..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive4.tf +++ /dev/null @@ -1,119 +0,0 @@ -#this is a problematic code where the query should report a result(s) -resource "google_container_cluster" "primary1" { - name = 
"marcellus-wallace" - location = "us-central1-a" - initial_node_count = 3 - - master_auth { - username = "" - password = "root" - - client_certificate_config { - issue_client_certificate = true - } - } - - timeouts { - create = "30m" - update = "40m" - } -} - -resource "google_container_cluster" "primary2" { - name = "marcellus-wallace" - location = "us-central1-a" - initial_node_count = 3 - - master_auth { - username = "" - password = "pwd_jsuwauJk212" - - client_certificate_config { - issue_client_certificate = true - } - } - - timeouts { - create = "30m" - update = "40m" - } -} - -resource "google_container_cluster" "primary3" { - name = "marcellus-wallace" - location = "us-central1-a" - initial_node_count = 3 - - master_auth { - username = "1234567890qwertyuiopasdfghjklçzxcvbnm" - password = "" - - client_certificate_config { - issue_client_certificate = true - } - } - - timeouts { - create = "30m" - update = "40m" - } -} - -resource "google_container_cluster" "primary4" { - name = "marcellus-wallace" - location = "us-central1-a" - initial_node_count = 3 - - master_auth { - username = "" - password = "abcd s" - - client_certificate_config { - issue_client_certificate = true - } - } - - timeouts { - create = "30m" - update = "40m" - } -} - -resource "google_container_cluster" "primary5" { - name = "marcellus-wallace-credential" - location = "us-central1-a" - initial_node_count = 3 - - master_auth { - username = "PRIVATE KEY_key" - password = "" - - client_certificate_config { - issue_client_certificate = true - } - } - - timeouts { - create = "30m" - update = "40m" - } -} - -resource "google_secret_manager_secret" "secret-basic" { - secret_id = "secret-version" - - labels = { - label = "my-label" - } - - replication { - automatic = true - } -} - - -resource "google_secret_manager_secret_version" "secret-version-basic" { - secret = "sasdsadwda" - - secret_data = "secret-data" -} diff --git 
a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive7.yaml b/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive7.yaml deleted file mode 100644 index a24d930ba16..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive7.yaml +++ /dev/null @@ -1,36 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: x -spec: - replicas: 5 - selector: - matchLabels: - app: x - template: - metadata: - labels: - app: x - spec: - containers: - - name: x - image: x - ports: - - containerPort: 5432 - env: - - name: PORT - value: "1234" - - name: DB_HOST - value: "127.0.0.1" - - name: DB_PORT - value: "23" - - name: DB_PORT_BD - value: "5432" - - name: DB_HOST_BD - value: "127.0.0.1" - - name: DB_NAME_BD - value: "dbx" - - name: DB_PASS_BD - value: "passx" - - name: DB_USER_BD - value: "userx" diff --git a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive_expected_result.json b/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive_expected_result.json deleted file mode 100644 index f4477b6934b..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_infrastructure_code/test/positive_expected_result.json +++ /dev/null @@ -1,92 +0,0 @@ -[ - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 8, - "fileName": "positive1.yaml" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 6, - "fileName": "positive2.yaml" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 7, - "fileName": "positive3.yaml" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 9, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 29, - "fileName": "positive4.tf" - }, 
- { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 49, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 69, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 89, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 116, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 118, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 3, - "fileName": "positive5.dockerfile" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 7, - "fileName": "positive5.dockerfile" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 3, - "fileName": "positive6.json" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 6, - "fileName": "positive6.json" - }, - { - "queryName": "Passwords And Secrets In Infrastructure Code", - "severity": "HIGH", - "line": 33, - "fileName": "positive7.yaml" - } -] diff --git a/assets/queries/common/passwords_and_secrets_in_url/metadata.json b/assets/queries/common/passwords_and_secrets_in_url/metadata.json deleted file mode 100644 index ec808b62756..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/metadata.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "id": "c09239d5-29d3-4dca-b829-f5553e6c0578", - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "category": "Secret Management", - "descriptionText": "Query to find passwords and secrets in URL", - "descriptionUrl": "https://kics.io/", - "platform": "Common", - "descriptionID": 
"6c7382ee", - "cloudProvider": "common" -} diff --git a/assets/queries/common/passwords_and_secrets_in_url/query.rego b/assets/queries/common/passwords_and_secrets_in_url/query.rego deleted file mode 100644 index f043cbd4bc6..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/query.rego +++ /dev/null @@ -1,82 +0,0 @@ -package Cx - -import data.generic.common as commonLib - -# search for harcoded secrets by looking for their values with a special chars and length -CxPolicy[result] { - docs := input.document[_] - - [path, value] = walk(docs) - is_string(value) - checkObjects := prepare_object(path[minus(count(path), 1)], value) - checkObject := checkObjects[_] - check_vulnerability(checkObject) - allPath := [x | merge_path(path[i]) != ""; x := merge_path(path[i])] - result := { - "documentId": docs.id, - "searchKey": resolve_path(checkObject, allPath, value), - "issueType": "RedundantAttribute", - "keyExpectedValue": "Hardcoded secret key should not appear in source", - "keyActualValue": value, - } -} - -merge_path(pathItem) = item { - not is_string(pathItem) - item := "" -} else = item { - clearParse := ["playbooks", "tasks", "command", "original"] - commonLib.equalsOrInArray(clearParse, lower(pathItem)) - item := "" -} else = item { - contains(pathItem, ".") - item := sprintf("{{%s}}", [pathItem]) -} else = item { - item := pathItem -} - -resolve_path(obj, path, value) = resolved { - obj.id != "" - resolved := sprintf("FROM=%s.{{%s}}", [concat(".", path), obj.id]) -} else = resolved { - resolved := sprintf("%s=%s", [concat(".", path), value]) -} - -prepare_object(key, value) = obj { - #dockerfile - key == "Original" - args := split(value, " ") - obj := [x | x := create_docker_object(args[_], value)] -} else = obj { - obj := [{ - "key": key, - "value": value, - "id": "", - }] -} - -create_docker_object(value, original) = obj { - contains(value, "=") - splitted := split(value, "=") - count(splitted) > 1 - k := splitted[0] - is_string(k) - v := 
concat("", array.slice(splitted, 1, count(splitted))) - obj := { - "key": k, - "value": replace(v, "\"", ""), - "id": original, - } -} - -check_vulnerability(correctStrings) { - # password in url - count(regex.find_n("^[a-zA-Z]{3,10}://[^/\\s:@]{3,20}:[^/\\s:@]{3,20}@.{1,100}[\"'\\s]*", correctStrings.value, -1)) > 0 - true -} else { - # slack webhook - count(regex.find_n("^https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}", correctStrings.value, -1)) > 0 -} else { - # teams webhook - count(regex.find_n("^https://[a-zA-Z0-9_]{1,24}\\.webhook\\.office\\.com/webhookb2/[a-zA-Z0-9-]+(@[a-zA-Z0-9-]+)?/IncomingWebhook/[a-zA-Z0-9]+/[a-zA-Z0-9-]+", correctStrings.value, -1)) > 0 -} diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/negative1.yaml b/assets/queries/common/passwords_and_secrets_in_url/test/negative1.yaml deleted file mode 100644 index 82dd71618ec..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/negative1.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: ClusterIssuer -metadata: - name: checkmarx - namespace: cx -spec: - acme: - server: https://check-v02.api.cx.org/directory - privateKeySecretRef: - name: cx-prod-site - solvers: - - http01: - ingress: - class: nginx diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/negative2.yaml b/assets/queries/common/passwords_and_secrets_in_url/test/negative2.yaml deleted file mode 100644 index 6f45de38712..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/negative2.yaml +++ /dev/null @@ -1,9 +0,0 @@ -AWSTemplateFormatVersion: "2010-09-09" -Resources: - myStackWithParams: - Type: AWS::CloudFormation::Stack - Properties: - TemplateURL: https://s3.amazonaws.com/cloudformation-templates-us-east-2/EC2ChooseAMI.template - Parameters: - InstanceType: t1.micro - KeyName: mykey diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/negative3.yaml 
b/assets/queries/common/passwords_and_secrets_in_url/test/negative3.yaml deleted file mode 100644 index b6d6a6b0339..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/negative3.yaml +++ /dev/null @@ -1,5 +0,0 @@ -- name: Download foo.conf - get_url: - url: http://example.com/path/file.conf - dest: /etc/foo.conf - mode: "0440" diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/negative4.tf b/assets/queries/common/passwords_and_secrets_in_url/test/negative4.tf deleted file mode 100644 index abb9a176266..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/negative4.tf +++ /dev/null @@ -1,26 +0,0 @@ -data "http" "example" { - url = "https://checkpoint-api.hashicorp.com/v1/check/terraform" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} - -data "http" "example_2" { - url = "https://checkpoint-api.hashicorp.com/v1/check/terraform" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} - -data "http" "example_3" { - url = "https://checkpoint-api.hashicorp.com/v1/check/terraform" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/negative5.dockerfile b/assets/queries/common/passwords_and_secrets_in_url/test/negative5.dockerfile deleted file mode 100644 index 5c147649f3d..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/negative5.dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM baseImage - -RUN command diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive1.yaml b/assets/queries/common/passwords_and_secrets_in_url/test/positive1.yaml deleted file mode 100644 index 3b372edacd6..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/positive1.yaml +++ /dev/null @@ -1,30 +0,0 @@ -apiVersion: v1 -kind: ClusterIssuer -metadata: - name: checkmarx - namespace: cx -spec: - 
acme: - server: http://bob:sekret@example.invalid/some/path - privateKeySecretRef: - name: cx-prod-site - solvers: - - http01: - ingress: - class: nginx - acme_2: - server: https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX - privateKeySecretRef: - name: cx-prod-site-2 - solvers: - - http01: - ingress: - class: nginx - acme_3: - server: https://team_name.webhook.office.com/webhookb2/7aa49aa6-7840-443d-806c-08ebe8f59966@c662313f-14fc-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8592f62b50cf41b9b93ba0c0a00a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18 - privateKeySecretRef: - name: cx-prod-site-3 - solvers: - - http01: - ingress: - class: nginx diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive2.yaml b/assets/queries/common/passwords_and_secrets_in_url/test/positive2.yaml deleted file mode 100644 index 5d7d934c78d..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/positive2.yaml +++ /dev/null @@ -1,23 +0,0 @@ -AWSTemplateFormatVersion: "2010-09-09" -Resources: - myStackWithParams: - Type: AWS::CloudFormation::Stack - Properties: - TemplateURL: http://bob:sekret@example.invalid/some/path - Parameters: - InstanceType: t1.micro - KeyName: mykey - myStackWithParams_1: - Type: AWS::CloudFormation::Stack - Properties: - TemplateURL: https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX - Parameters: - InstanceType: t1.micro - KeyName: mykey - myStackWithParams_2: - Type: AWS::CloudFormation::Stack - Properties: - TemplateURL: https://team_name.webhook.office.com/webhookb2/7aa49aa6-7840-443d-806c-08ebe8f59966@c662313f-14fc-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8592f62b50cf41b9b93ba0c0a00a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18 - Parameters: - InstanceType: t1.micro - KeyName: mykey diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive3.yaml b/assets/queries/common/passwords_and_secrets_in_url/test/positive3.yaml deleted file mode 100644 index 
c123eb358f5..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/positive3.yaml +++ /dev/null @@ -1,17 +0,0 @@ -- name: example - get_url: - url: http://bob:sekret@example.invalid/some/path - dest: /etc/foo.conf - mode: "0440" - -- name: example_2 - get_url: - url: https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX - dest: /etc/foo.conf - mode: "0440" - -- name: example_3 - get_url: - url: https://team_name.webhook.office.com/webhookb2/7aa49aa6-7840-443d-806c-08ebe8f59966@c662313f-14fc-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8592f62b50cf41b9b93ba0c0a00a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18 - dest: /etc/foo.conf - mode: "0440" diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive4.tf b/assets/queries/common/passwords_and_secrets_in_url/test/positive4.tf deleted file mode 100644 index 026683a62af..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/positive4.tf +++ /dev/null @@ -1,26 +0,0 @@ -data "http" "example" { - url = "http://bob:sekret@example.invalid/some/path" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} - -data "http" "example_2" { - url = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} - -data "http" "example_3" { - url = "https://team_name.webhook.office.com/webhookb2/7aa49aa6-7840-443d-806c-08ebe8f59966@c662313f-14fc-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8592f62b50cf41b9b93ba0c0a00a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18" - - # Optional request headers - request_headers = { - Accept = "application/json" - } -} diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive5.dockerfile b/assets/queries/common/passwords_and_secrets_in_url/test/positive5.dockerfile deleted file mode 100644 index eda1a1679f6..00000000000 --- 
a/assets/queries/common/passwords_and_secrets_in_url/test/positive5.dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM test2 -ARG url="http://bob:sekret@example.invalid/some/path" -ARG url2="https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX" -ARG url3="https://team_name.webhook.office.com/webhookb2/7aa49aa6-7840-443d-806c-08ebe8f59966@c662313f-14fc-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8592f62b50cf41b9b93ba0c0a00a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18" diff --git a/assets/queries/common/passwords_and_secrets_in_url/test/positive_expected_result.json b/assets/queries/common/passwords_and_secrets_in_url/test/positive_expected_result.json deleted file mode 100644 index 807fe5c89a7..00000000000 --- a/assets/queries/common/passwords_and_secrets_in_url/test/positive_expected_result.json +++ /dev/null @@ -1,146 +0,0 @@ -[ - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 8, - "fileName": "positive1.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 16, - "fileName": "positive1.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 24, - "fileName": "positive1.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 6, - "fileName": "positive2.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 13, - "fileName": "positive2.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 20, - "fileName": "positive2.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 3, - "fileName": "positive3.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 9, - "fileName": "positive3.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 15, - "fileName": "positive3.yaml" - }, - { - "queryName": "Passwords And Secrets 
In URL", - "severity": "HIGH", - "line": 2, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 11, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 20, - "fileName": "positive4.tf" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 2, - "fileName": "positive5.dockerfile" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 3, - "fileName": "positive5.dockerfile" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 4, - "fileName": "positive5.dockerfile" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 7, - "fileName": "positive6.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 9, - "fileName": "positive6.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 11, - "fileName": "positive6.yaml" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 7, - "fileName": "positive7.json" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 17, - "fileName": "positive7.json" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 27, - "fileName": "positive7.json" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 10, - "fileName": "positive8.json" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 14, - "fileName": "positive8.json" - }, - { - "queryName": "Passwords And Secrets In URL", - "severity": "HIGH", - "line": 18, - "fileName": "positive8.json" - } -] diff --git a/assets/queries/terraform/aws/redshift_cluster_without_vpc/query.rego b/assets/queries/terraform/aws/redshift_cluster_without_vpc/query.rego index edd4acf11fe..9f459d96348 
100644 --- a/assets/queries/terraform/aws/redshift_cluster_without_vpc/query.rego +++ b/assets/queries/terraform/aws/redshift_cluster_without_vpc/query.rego @@ -16,5 +16,6 @@ CxPolicy[result] { "issueType": "MissingAttribute", "keyExpectedValue": sprintf("aws_redshift_cluster[%s].%s is set", [name, attr]), "keyActualValue": sprintf("aws_redshift_cluster[%s].%s is undefined", [name, attr]), + "searchValue": sprintf("%s", [attr]), } } diff --git a/e2e/cli_test.go b/e2e/cli_test.go index c3ae4289760..3730a04be43 100644 --- a/e2e/cli_test.go +++ b/e2e/cli_test.go @@ -383,8 +383,17 @@ var tests = []testCase{ name: "E2E-CLI-022", args: args{ args: []cmdArgs{ - []string{"scan", "--profiling", "CPU", "-v", - "--no-progress", "--no-color", "-q", "../assets/queries", "-p", "fixtures/samples/terraform.tf"}, + []string{"scan", + "--profiling", + "CPU", + "-v", + "--no-progress", + "--no-color", + "-q", + "../assets/queries", + "-p", + "fixtures/samples/terraform.tf", + }, }, }, validation: func(outputText string) bool { @@ -616,8 +625,11 @@ var tests = []testCase{ args: args{ args: []cmdArgs{ - []string{"scan", "--exclude-results", "2abf26c3014fc445da69d8d5bb862c1c511e8e16ad3a6c6f6e14c28aa0adac1d," + - "d1c5f6aec84fd91ed24f5f06ccb8b6662e26c0202bcb5d4a58a1458c16456d20", + []string{"scan", + "--exclude-results", + "2abf26c3014fc445da69d8d5bb862c1c511e8e16ad3a6c6f6e14c28aa0adac1d," + + "4aa3f159f39767de53b49ed871977b8b499bf19b3b0865b1631042aa830598aa," + + "83461a5eac8fed2264fac68a6d352d1ed752867a9b0a131afa9ba7e366159b59", "-q", "../assets/queries", "-p", "fixtures/samples/terraform-single.tf"}, []string{"scan", "--exclude-results", "-q", "../assets/queries", "-p", "fixtures/samples/terraform-single.tf"}, diff --git a/e2e/fixtures/E2E_CLI_032_RESULT.json b/e2e/fixtures/E2E_CLI_032_RESULT.json index ebfa21446ba..caedac0f41a 100644 --- a/e2e/fixtures/E2E_CLI_032_RESULT.json +++ b/e2e/fixtures/E2E_CLI_032_RESULT.json @@ -3,13 +3,26 @@ "files_scanned": 1, "files_parsed": 1, 
"files_failed_to_scan": 0, - "queries_total": 588, + "queries_total": 785, "queries_failed_to_execute": 0, "queries_failed_to_compute_similarity_id": 0, + "scan_id": "console", + "severity_counters": { + "HIGH": 6, + "INFO": 2, + "LOW": 0, + "MEDIUM": 6 + }, + "total_counter": 14, + "start": "2021-09-10T13:49:52.586954+01:00", + "end": "2021-09-10T13:49:56.096989+01:00", + "paths": [ + "fixtures/samples/terraform.tf" + ], "queries": [ { - "query_name": "Passwords And Secrets In Infrastructure Code", - "query_id": "f996f3cb-00fc-480c-8973-8ab04d44a8cc", + "query_name": "Passwords And Secrets - Generic Password", + "query_id": "487f4be7-3fd9-4506-a07a-eae252180c08", "query_url": "https://kics.io/", "severity": "HIGH", "platform": "Common", @@ -22,26 +35,26 @@ "files": [ { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "ad5ddbe84fe54d121c8ef856217e0184958db4bf4c4e472f99c31718427b9053", - "line": 14, + "similarity_id": "9e26d1ce4d2e0f7fa9b77195bd329f18c135b946ba74a13bc05a289dfc3455f1", + "line": 5, "issue_type": "RedundantAttribute", - "search_key": "resource.aws_redshift_cluster.default1.master_password", + "search_key": "", "search_line": 0, "search_value": "", "expected_value": "Hardcoded secret key should not appear in source", - "actual_value": "Mustbe8characters", + "actual_value": "' master_password = \"Mustbe8characters\"' contains a secret", "value": null }, { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "5f3789ae5dac05a64bba584fe201f769786cdaa5a8b7a32aaa057476c65535e2", - "line": 5, + "similarity_id": "d6a018d85a93d338ed89c82b791f30c1913eff5e743f67cfa52176f5135aea2b", + "line": 14, "issue_type": "RedundantAttribute", - "search_key": "resource.aws_redshift_cluster.default.master_password", + "search_key": "", "search_line": 0, "search_value": "", "expected_value": "Hardcoded secret key should not appear in source", - "actual_value": "Mustbe8characters", + "actual_value": "' master_password = \"Mustbe8characters\"' contains 
a secret", "value": null } ] @@ -139,10 +152,10 @@ "files": [ { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "2abf26c3014fc445da69d8d5bb862c1c511e8e16ad3a6c6f6e14c28aa0adac1d", - "line": 10, + "similarity_id": "0455ad9d92fa1dc1cbf20dd5042ee21d9ae176388662b5982501aa01724e50d9", + "line": 1, "issue_type": "MissingAttribute", - "search_key": "aws_redshift_cluster[default1]", + "search_key": "aws_redshift_cluster[default]", "search_line": 0, "search_value": "", "expected_value": "'aws_redshift_cluster.logging' is true", @@ -151,10 +164,10 @@ }, { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "0455ad9d92fa1dc1cbf20dd5042ee21d9ae176388662b5982501aa01724e50d9", - "line": 1, + "similarity_id": "2abf26c3014fc445da69d8d5bb862c1c511e8e16ad3a6c6f6e14c28aa0adac1d", + "line": 10, "issue_type": "MissingAttribute", - "search_key": "aws_redshift_cluster[default]", + "search_key": "aws_redshift_cluster[default1]", "search_line": 0, "search_value": "", "expected_value": "'aws_redshift_cluster.logging' is true", @@ -178,50 +191,50 @@ "files": [ { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "e4d7e3bd4992042d27482311989d6224a553385eb5bcc0988c90c1c10bd99e8c", + "similarity_id": "709853fdb034e451c68825041190bbff098e2893528d91c39d84d31ea93ecae6", "line": 1, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default]", "search_line": 0, - "search_value": "", + "search_value": "cluster_subnet_group_name", "expected_value": "aws_redshift_cluster[default].cluster_subnet_group_name is set", "actual_value": "aws_redshift_cluster[default].cluster_subnet_group_name is undefined", "value": null }, { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "e4d7e3bd4992042d27482311989d6224a553385eb5bcc0988c90c1c10bd99e8c", + "similarity_id": "c703e26654dc3e9da1ad3519663f38aed2a29e629b4342f9e75af464a07699e0", "line": 1, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default]", "search_line": 
0, - "search_value": "", + "search_value": "vpc_security_group_ids", "expected_value": "aws_redshift_cluster[default].vpc_security_group_ids is set", "actual_value": "aws_redshift_cluster[default].vpc_security_group_ids is undefined", "value": null }, { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "d1c5f6aec84fd91ed24f5f06ccb8b6662e26c0202bcb5d4a58a1458c16456d20", + "similarity_id": "83461a5eac8fed2264fac68a6d352d1ed752867a9b0a131afa9ba7e366159b59", "line": 10, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default1]", "search_line": 0, - "search_value": "", - "expected_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is set", - "actual_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is undefined", + "search_value": "vpc_security_group_ids", + "expected_value": "aws_redshift_cluster[default1].vpc_security_group_ids is set", + "actual_value": "aws_redshift_cluster[default1].vpc_security_group_ids is undefined", "value": null }, { "file_name": "fixtures/samples/terraform.tf", - "similarity_id": "d1c5f6aec84fd91ed24f5f06ccb8b6662e26c0202bcb5d4a58a1458c16456d20", + "similarity_id": "4aa3f159f39767de53b49ed871977b8b499bf19b3b0865b1631042aa830598aa", "line": 10, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default1]", "search_line": 0, - "search_value": "", - "expected_value": "aws_redshift_cluster[default1].vpc_security_group_ids is set", - "actual_value": "aws_redshift_cluster[default1].vpc_security_group_ids is undefined", + "search_value": "cluster_subnet_group_name", + "expected_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is set", + "actual_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is undefined", "value": null } ] @@ -265,18 +278,5 @@ } ] } - ], - "scan_id": "console", - "severity_counters": { - "HIGH": 6, - "INFO": 2, - "LOW": 0, - "MEDIUM": 6 - }, - "total_counter": 14, - "start": "2021-07-23T17:34:40.1474388+01:00", 
- "end": "2021-07-23T17:35:32.3344518+01:00", - "paths": [ - "fixtures/samples/terraform.tf" ] } diff --git a/e2e/fixtures/E2E_CLI_033_RESULT.json b/e2e/fixtures/E2E_CLI_033_RESULT.json index 824b1aa6bbe..3ad7c987eb1 100644 --- a/e2e/fixtures/E2E_CLI_033_RESULT.json +++ b/e2e/fixtures/E2E_CLI_033_RESULT.json @@ -3,9 +3,22 @@ "files_scanned": 1, "files_parsed": 1, "files_failed_to_scan": 0, - "queries_total": 588, + "queries_total": 785, "queries_failed_to_execute": 0, "queries_failed_to_compute_similarity_id": 0, + "scan_id": "console", + "severity_counters": { + "HIGH": 0, + "INFO": 1, + "LOW": 0, + "MEDIUM": 3 + }, + "total_counter": 4, + "start": "2021-09-10T13:55:18.709293+01:00", + "end": "2021-09-10T13:55:22.21519+01:00", + "paths": [ + "fixtures/samples/terraform-single.tf" + ], "queries": [ { "query_name": "Redshift Cluster Logging Disabled", @@ -49,24 +62,24 @@ "files": [ { "file_name": "fixtures/samples/terraform-single.tf", - "similarity_id": "d1c5f6aec84fd91ed24f5f06ccb8b6662e26c0202bcb5d4a58a1458c16456d20", + "similarity_id": "4aa3f159f39767de53b49ed871977b8b499bf19b3b0865b1631042aa830598aa", "line": 1, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default1]", "search_line": 0, - "search_value": "", + "search_value": "cluster_subnet_group_name", "expected_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is set", "actual_value": "aws_redshift_cluster[default1].cluster_subnet_group_name is undefined", "value": null }, { "file_name": "fixtures/samples/terraform-single.tf", - "similarity_id": "d1c5f6aec84fd91ed24f5f06ccb8b6662e26c0202bcb5d4a58a1458c16456d20", + "similarity_id": "83461a5eac8fed2264fac68a6d352d1ed752867a9b0a131afa9ba7e366159b59", "line": 1, "issue_type": "MissingAttribute", "search_key": "aws_redshift_cluster[default1]", "search_line": 0, - "search_value": "", + "search_value": "vpc_security_group_ids", "expected_value": "aws_redshift_cluster[default1].vpc_security_group_ids is set", "actual_value": 
"aws_redshift_cluster[default1].vpc_security_group_ids is undefined", "value": null @@ -100,18 +113,5 @@ } ] } - ], - "scan_id": "console", - "severity_counters": { - "HIGH": 0, - "INFO": 1, - "LOW": 0, - "MEDIUM": 3 - }, - "total_counter": 4, - "start": "2021-07-23T17:40:14.888483+01:00", - "end": "2021-07-23T17:41:02.3078595+01:00", - "paths": [ - "fixtures/samples/terraform-single.tf" ] } diff --git a/e2e/fixtures/assets/scan_help b/e2e/fixtures/assets/scan_help index 98344c0d6d0..488bff5a1c8 100644 --- a/e2e/fixtures/assets/scan_help +++ b/e2e/fixtures/assets/scan_help @@ -2,53 +2,55 @@ Usage: kics scan [flags] Flags: - --cloud-provider strings list of cloud providers to scan (aws, azure, gcp) - --config string path to configuration file - --disable-full-descriptions disable request for full descriptions and use default vulnerability descriptions - --exclude-categories strings exclude categories by providing its name - cannot be provided with query inclusion flags - can be provided multiple times or as a comma separated string - example: 'Access control,Best practices' - -e, --exclude-paths strings exclude paths from scan - supports glob and can be provided multiple times or as a quoted comma separated string - example: './shouldNotScan/*,somefile.txt' - --exclude-queries strings exclude queries by providing the query ID - cannot be provided with query inclusion flags - can be provided multiple times or as a comma separated string - example: 'e69890e6-fce5-461d-98ad-cb98318dfc96,4728cd65-a20c-49da-8b31-9c08b423e4db' - -x, --exclude-results strings exclude results by providing the similarity ID of a result - can be provided multiple times or as a comma separated string - example: 'fec62a97d569662093dbb9739360942f...,31263s5696620s93dbb973d9360942fc2a...' 
- --exclude-severities strings exclude results by providing the severity of a result - can be provided multiple times or as a comma separated string - example: 'info,low' - --fail-on strings which kind of results should return an exit code different from 0 - accepts: high, medium, low and info - example: "high,low" (default [high,medium,low,info]) - -h, --help help for scan - --ignore-on-exit string defines which kind of non-zero exits code should be ignored - accepts: all, results, errors, none - example: if 'results' is set, only engine errors will make KICS exit code different from 0 (default "none") - -i, --include-queries strings include queries by providing the query ID - cannot be provided with query exclusion flags - can be provided multiple times or as a comma separated string - example: 'e69890e6-fce5-461d-98ad-cb98318dfc96,4728cd65-a20c-49da-8b31-9c08b423e4db' - --input-data string path to query input data files - -b, --libraries-path string path to directory with libraries (default "./assets/libraries") - --minimal-ui simplified version of CLI output - --no-progress hides the progress bar - --output-name string name used on report creations (default "results") - -o, --output-path string directory path to store reports - -p, --path strings paths or directories to scan - example: "./somepath,somefile.txt" - --payload-lines adds line information inside the payload when printing the payload file - -d, --payload-path string path to store internal representation JSON file - --preview-lines int number of lines to be display in CLI results (min: 1, max: 30) (default 3) - -q, --queries-path string path to directory with queries (default "./assets/queries") - --report-formats strings formats in which the results will be exported (all, glsast, html, json, pdf, sarif) (default [json]) - --timeout int number of seconds the query has to execute before being canceled (default 60) - -t, --type strings case insensitive list of platform types to scan - (Ansible, 
AzureResourceManager, CloudFormation, Dockerfile, Kubernetes, OpenAPI, Terraform) + --cloud-provider strings list of cloud providers to scan (aws, azure, gcp) + --config string path to configuration file + --disable-full-descriptions disable request for full descriptions and use default vulnerability descriptions + --disable-secrets disable secrets scanning + --exclude-categories strings exclude categories by providing its name + cannot be provided with query inclusion flags + can be provided multiple times or as a comma separated string + example: 'Access control,Best practices' + -e, --exclude-paths strings exclude paths from scan + supports glob and can be provided multiple times or as a quoted comma separated string + example: './shouldNotScan/*,somefile.txt' + --exclude-queries strings exclude queries by providing the query ID + cannot be provided with query inclusion flags + can be provided multiple times or as a comma separated string + example: 'e69890e6-fce5-461d-98ad-cb98318dfc96,4728cd65-a20c-49da-8b31-9c08b423e4db' + -x, --exclude-results strings exclude results by providing the similarity ID of a result + can be provided multiple times or as a comma separated string + example: 'fec62a97d569662093dbb9739360942f...,31263s5696620s93dbb973d9360942fc2a...' 
+ --exclude-severities strings exclude results by providing the severity of a result + can be provided multiple times or as a comma separated string + example: 'info,low' + --fail-on strings which kind of results should return an exit code different from 0 + accepts: high, medium, low and info + example: "high,low" (default [high,medium,low,info]) + -h, --help help for scan + --ignore-on-exit string defines which kind of non-zero exits code should be ignored + accepts: all, results, errors, none + example: if 'results' is set, only engine errors will make KICS exit code different from 0 (default "none") + -i, --include-queries strings include queries by providing the query ID + cannot be provided with query exclusion flags + can be provided multiple times or as a comma separated string + example: 'e69890e6-fce5-461d-98ad-cb98318dfc96,4728cd65-a20c-49da-8b31-9c08b423e4db' + --input-data string path to query input data files + -b, --libraries-path string path to directory with libraries (default "./assets/libraries") + --minimal-ui simplified version of CLI output + --no-progress hides the progress bar + --output-name string name used on report creations (default "results") + -o, --output-path string directory path to store reports + -p, --path strings paths or directories to scan + example: "./somepath,somefile.txt" + --payload-lines adds line information inside the payload when printing the payload file + -d, --payload-path string path to store internal representation JSON file + --preview-lines int number of lines to be display in CLI results (min: 1, max: 30) (default 3) + -q, --queries-path string path to directory with queries (default "./assets/queries") + --report-formats strings formats in which the results will be exported (all, glsast, html, json, pdf, sarif) (default [json]) + -r, --secrets-regexes-path string path to secrets regex rules configuration file + --timeout int number of seconds the query has to execute before being canceled (default 60) + -t, 
--type strings case insensitive list of platform types to scan + (Ansible, AzureResourceManager, CloudFormation, Dockerfile, Kubernetes, OpenAPI, Terraform) Global Flags: --ci display only log messages to CLI output (mutually exclusive with silent) diff --git a/go.mod b/go.mod index 261f2deb0fd..de500f4c75c 100644 --- a/go.mod +++ b/go.mod @@ -31,6 +31,7 @@ require ( github.com/xeipuuv/gojsonschema v1.2.0 github.com/zclconf/go-cty v1.9.1 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 + golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b helm.sh/helm/v3 v3.6.3 ) diff --git a/internal/console/assets/scan-flags.json b/internal/console/assets/scan-flags.json index ea462f1bf13..1e31b64640e 100644 --- a/internal/console/assets/scan-flags.json +++ b/internal/console/assets/scan-flags.json @@ -155,6 +155,18 @@ "usage": "formats in which the results will be exported (${supportedReports})", "validation": "validateMultiStrEnum" }, + "secrets-regexes-path": { + "flagType": "str", + "shorthandFlag": "r", + "defaultValue": "", + "usage": "path to secrets regex rules configuration file" + }, + "disable-secrets": { + "flagType": "bool", + "shorthandFlag": "", + "defaultValue": "false", + "usage": "disable secrets scanning" + }, "timeout": { "flagType": "int", "shorthandFlag": "", diff --git a/internal/console/flags/scan_flags.go b/internal/console/flags/scan_flags.go index b0edaabcb6e..10e83fe35ce 100644 --- a/internal/console/flags/scan_flags.go +++ b/internal/console/flags/scan_flags.go @@ -2,30 +2,32 @@ package flags // Flags constants for scan const ( - CloudProviderFlag = "cloud-provider" - ConfigFlag = "config" - DisableCISDescFlag = "disable-cis-descriptions" - DisableFullDescFlag = "disable-full-descriptions" - ExcludeCategoriesFlag = "exclude-categories" - ExcludePathsFlag = "exclude-paths" - ExcludeQueriesFlag = "exclude-queries" - ExcludeResultsFlag = "exclude-results" - ExcludeSeveritiesFlag = 
"exclude-severities" - IncludeQueriesFlag = "include-queries" - InputDataFlag = "input-data" - FailOnFlag = "fail-on" - IgnoreOnExitFlag = "ignore-on-exit" - MinimalUIFlag = "minimal-ui" - NoProgressFlag = "no-progress" - OutputNameFlag = "output-name" - OutputPathFlag = "output-path" - PathFlag = "path" - PayloadPathFlag = "payload-path" - PreviewLinesFlag = "preview-lines" - QueriesPath = "queries-path" - LibrariesPath = "libraries-path" - ReportFormatsFlag = "report-formats" - TypeFlag = "type" - QueryExecTimeoutFlag = "timeout" - LineInfoPayloadFlag = "payload-lines" + CloudProviderFlag = "cloud-provider" + ConfigFlag = "config" + DisableCISDescFlag = "disable-cis-descriptions" + DisableFullDescFlag = "disable-full-descriptions" + ExcludeCategoriesFlag = "exclude-categories" + ExcludePathsFlag = "exclude-paths" + ExcludeQueriesFlag = "exclude-queries" + ExcludeResultsFlag = "exclude-results" + ExcludeSeveritiesFlag = "exclude-severities" + IncludeQueriesFlag = "include-queries" + InputDataFlag = "input-data" + FailOnFlag = "fail-on" + IgnoreOnExitFlag = "ignore-on-exit" + MinimalUIFlag = "minimal-ui" + NoProgressFlag = "no-progress" + OutputNameFlag = "output-name" + OutputPathFlag = "output-path" + PathFlag = "path" + PayloadPathFlag = "payload-path" + PreviewLinesFlag = "preview-lines" + QueriesPath = "queries-path" + LibrariesPath = "libraries-path" + ReportFormatsFlag = "report-formats" + TypeFlag = "type" + QueryExecTimeoutFlag = "timeout" + LineInfoPayloadFlag = "payload-lines" + DisableSecretsFlag = "disable-secrets" + SecretsRegexesPathFlag = "secrets-regexes-path" //nolint:gosec ) diff --git a/internal/console/kics_test.go b/internal/console/kics_test.go index 784e03d239d..74ce0880b18 100644 --- a/internal/console/kics_test.go +++ b/internal/console/kics_test.go @@ -16,46 +16,81 @@ func TestConsole_Execute(t *testing.T) { //nolint }{ { name: "test_kics", - args: []string{"kics", "scan", "--path", 
filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), - "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf")}, + args: []string{"kics", + "scan", + "--disable-secrets", + "--path", + filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), + "-q", + filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), + }, wantErr: false, remove: "", }, { name: "test_kics_output_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), - "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), "-o", "results.json"}, + args: []string{"kics", + "scan", + "--disable-secrets", + "-p", + filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), + "-q", + filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), + "-o", + "results.json", + }, wantErr: false, remove: "results.json", }, { name: "test_kics_payload_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), "-q", - filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), "-d", "payload.json"}, + args: []string{"kics", + "scan", + "--disable-secrets", + "-p", + filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), + "-q", + filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), + "-d", + "payload.json", + }, wantErr: false, remove: "payload.json", }, { name: "test_kics_exclude_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/tc-sim01"), "-q", + args: []string{"kics", + "scan", + "--disable-secrets", + "-p", + filepath.FromSlash("../../test/fixtures/tc-sim01"), + "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), - "-e", filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf")}, + "-e", + 
filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), + }, wantErr: false, remove: "", }, { name: "test_kics_exclude_results_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), "-q", + args: []string{"kics", + "scan", + "--disable-secrets", + "-p", + filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), + "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), - "-x", "c8f2b4b2a74bca2aa6d94336c144f9713524b745c1a3590e6492e98d819e352d"}, + "-x", "c8f2b4b2a74bca2aa6d94336c144f9713524b745c1a3590e6492e98d819e352d", + }, wantErr: false, remove: "", }, { name: "test_kics_multiple_paths", - args: []string{ - "kics", + args: []string{"kics", "scan", + "--disable-secrets", "-p", fmt.Sprintf("%s,%s", filepath.FromSlash("../../test/fixtures/tc-sim01/positive1.tf"), @@ -67,17 +102,30 @@ func TestConsole_Execute(t *testing.T) { //nolint }, { name: "test_kics_config_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/config"), "-q", + args: []string{"kics", + "scan", + "--disable-secrets", + "-p", + filepath.FromSlash("../../test/fixtures/config"), + "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), - "--config", filepath.FromSlash("../../test/fixtures/config/kics.config_json")}, + "--config", + filepath.FromSlash("../../test/fixtures/config/kics.config_json"), + }, wantErr: false, remove: "", }, { name: "test_kics_unknown_config_flag", - args: []string{"kics", "scan", "-p", filepath.FromSlash("../../test/fixtures/config"), "-q", + args: []string{"kics", + "scan", + "-p", + filepath.FromSlash("../../test/fixtures/config"), + "-q", filepath.FromSlash("../../assets/queries/terraform/aws/alb_is_not_integrated_with_waf"), - "--config", filepath.FromSlash("../../test/fixtures/config/kics_unknown.config_json")}, + "--config", + 
filepath.FromSlash("../../test/fixtures/config/kics_unknown.config_json"), + }, wantErr: true, remove: "", }, diff --git a/internal/console/scan.go b/internal/console/scan.go index 81d09562c4a..051325d8af8 100644 --- a/internal/console/scan.go +++ b/internal/console/scan.go @@ -10,6 +10,7 @@ import ( "syscall" "time" + "github.com/Checkmarx/kics/assets" "github.com/Checkmarx/kics/internal/console/flags" consoleHelpers "github.com/Checkmarx/kics/internal/console/helpers" internalPrinter "github.com/Checkmarx/kics/internal/console/printer" @@ -21,6 +22,7 @@ import ( "github.com/Checkmarx/kics/pkg/descriptions" "github.com/Checkmarx/kics/pkg/engine" "github.com/Checkmarx/kics/pkg/engine/provider" + "github.com/Checkmarx/kics/pkg/engine/secrets" "github.com/Checkmarx/kics/pkg/engine/source" "github.com/Checkmarx/kics/pkg/kics" "github.com/Checkmarx/kics/pkg/model" @@ -40,6 +42,7 @@ import ( "github.com/spf13/cobra" "github.com/spf13/pflag" "github.com/spf13/viper" + "golang.org/x/term" ) var ( @@ -298,39 +301,6 @@ func getExcludeResultsMap(excludeResults []string) map[string]bool { return excludeResultsMap } -func createInspector(t engine.Tracker, querySource source.QueriesSource) (*engine.Inspector, error) { - excludeResultsMap := getExcludeResultsMap(flags.GetMultiStrFlag(flags.ExcludeResultsFlag)) - - excludeQueries := source.ExcludeQueries{ - ByIDs: flags.GetMultiStrFlag(flags.ExcludeQueriesFlag), - ByCategories: flags.GetMultiStrFlag(flags.ExcludeCategoriesFlag), - BySeverities: flags.GetMultiStrFlag(flags.ExcludeSeveritiesFlag), - } - - includeQueries := source.IncludeQueries{ - ByIDs: flags.GetMultiStrFlag(flags.IncludeQueriesFlag), - } - - queryFilter := source.QueryInspectorParameters{ - IncludeQueries: includeQueries, - ExcludeQueries: excludeQueries, - InputDataPath: flags.GetStrFlag(flags.InputDataFlag), - } - - inspector, err := engine.NewInspector(ctx, - querySource, - engine.DefaultVulnerabilityBuilder, - t, - &queryFilter, - excludeResultsMap, - 
flags.GetIntFlag(flags.QueryExecTimeoutFlag), - ) - if err != nil { - return nil, err - } - return inspector, nil -} - // analyzePaths will analyze the paths to scan to determine which type of queries to load // and which files should be ignored, it then updates the types and exclude flags variables // with the results found @@ -349,7 +319,9 @@ func analyzePaths(paths, types, exclude []string) (typesRes, excludeRes []string return types, exclude, nil } -func createService(inspector *engine.Inspector, +func createService( + inspector *engine.Inspector, + secretsInspector *secrets.Inspector, paths []string, t kics.Tracker, store kics.Storage, @@ -380,14 +352,18 @@ func createService(inspector *engine.Inspector, services := make([]*kics.Service, 0, len(combinedParser)) for _, parser := range combinedParser { - services = append(services, &kics.Service{ - SourceProvider: filesSource, - Storage: store, - Parser: parser, - Inspector: inspector, - Tracker: t, - Resolver: combinedResolver, - }) + services = append( + services, + &kics.Service{ + SourceProvider: filesSource, + Storage: store, + Parser: parser, + Inspector: inspector, + SecretsInspector: secretsInspector, + Tracker: t, + Resolver: combinedResolver, + }, + ) } return services, nil } @@ -395,31 +371,105 @@ func createService(inspector *engine.Inspector, type startServiceParameters struct { t *tracker.CITracker store kics.Storage - querySource *source.FilesystemSource extractedPaths []string progressBar progress.PBar pbBuilder *progress.PbBuilder + excludeResults map[string]bool } -func createServiceAndStartScan(params *startServiceParameters) (*engine.Inspector, error) { - inspector, err := createInspector(params.t, params.querySource) +func createQueryFilter() *source.QueryInspectorParameters { + excludeQueries := source.ExcludeQueries{ + ByIDs: flags.GetMultiStrFlag(flags.ExcludeQueriesFlag), + ByCategories: flags.GetMultiStrFlag(flags.ExcludeCategoriesFlag), + BySeverities: 
flags.GetMultiStrFlag(flags.ExcludeSeveritiesFlag), + } + + includeQueries := source.IncludeQueries{ + ByIDs: flags.GetMultiStrFlag(flags.IncludeQueriesFlag), + } + + queryFilter := source.QueryInspectorParameters{ + IncludeQueries: includeQueries, + ExcludeQueries: excludeQueries, + InputDataPath: flags.GetStrFlag(flags.InputDataFlag), + } + + return &queryFilter +} + +func getSecretsRegexRules(regexRulesPath string) (regexRulesContent string, err error) { + if len(regexRulesPath) > 0 { + b, err := os.ReadFile(regexRulesPath) + if err != nil { + return regexRulesContent, err + } + regexRulesContent = string(b) + } else { + regexRulesContent = assets.SecretsQueryRegexRulesJSON + } + + return regexRulesContent, nil +} + +func createServiceAndStartScan(params *startServiceParameters) (failedQueries map[string]error, err error) { + querySource := source.NewFilesystemSource( + flags.GetStrFlag(flags.QueriesPath), + flags.GetMultiStrFlag(flags.TypeFlag), + flags.GetMultiStrFlag(flags.CloudProviderFlag), + flags.GetStrFlag(flags.LibrariesPath)) + + queryFilter := createQueryFilter() + inspector, err := engine.NewInspector(ctx, + querySource, + engine.DefaultVulnerabilityBuilder, + params.t, + queryFilter, + params.excludeResults, + flags.GetIntFlag(flags.QueryExecTimeoutFlag), + ) + if err != nil { + return failedQueries, err + } + + secretsRegexRulesContent, err := getSecretsRegexRules(flags.GetStrFlag(flags.SecretsRegexesPathFlag)) + if err != nil { + return failedQueries, err + } + + secretsInspector, err := secrets.NewInspector( + ctx, + params.excludeResults, + params.t, + queryFilter, + flags.GetBoolFlag(flags.DisableSecretsFlag), + flags.GetIntFlag(flags.QueryExecTimeoutFlag), + secretsRegexRulesContent, + ) if err != nil { log.Err(err) - return &engine.Inspector{}, err + return failedQueries, err } - services, err := createService(inspector, params.extractedPaths, params.t, params.store, params.querySource) + services, err := createService( + inspector, + 
secretsInspector, + params.extractedPaths, + params.t, + params.store, + querySource, + ) if err != nil { log.Err(err) - return &engine.Inspector{}, err + return failedQueries, err } params.progressBar.Close() - if err = scanner.StartScan(ctx, scanID, *params.pbBuilder, services); err != nil { + if err = scanner.PrepareAndScan(ctx, scanID, *params.pbBuilder, services); err != nil { log.Err(err) - return &engine.Inspector{}, err + return failedQueries, err } - return inspector, nil + failedQueries = inspector.GetFailedQueries() + return failedQueries, nil } func resolvePath(flagName string) (string, error) { @@ -476,6 +526,31 @@ func preparePaths(changedDefaultQueryPath, changedDefaultLibrariesPath bool) err return nil } +func prepareAndAnalyzePaths(changedDefaultQueryPath, changedDefaultLibrariesPath bool) (extractedPaths provider.ExtractedPath, err error) { + err = preparePaths(changedDefaultQueryPath, changedDefaultLibrariesPath) + if err != nil { + return extractedPaths, err + } + + extractedPaths, err = provider.GetSources(flags.GetMultiStrFlag(flags.PathFlag)) + if err != nil { + return extractedPaths, err + } + + newTypeFlagValue, newExcludePathsFlagValue, errAnalyze := + analyzePaths( + extractedPaths.Path, + flags.GetMultiStrFlag(flags.TypeFlag), + flags.GetMultiStrFlag(flags.ExcludePathsFlag), + ) + if errAnalyze != nil { + return extractedPaths, errAnalyze + } + flags.SetMultiStrFlag(flags.TypeFlag, newTypeFlagValue) + flags.SetMultiStrFlag(flags.ExcludePathsFlag, newExcludePathsFlagValue) + return extractedPaths, nil +} + func scan(changedDefaultQueryPath, changedDefaultLibrariesPath bool) error { log.Debug().Msg("console.scan()") for _, warn := range warnings { @@ -489,8 +564,13 @@ func scan(changedDefaultQueryPath, changedDefaultLibrariesPath bool) error { fmt.Println(versionMsg) log.Info().Msgf(strings.ReplaceAll(versionMsg, "\n", "")) + noProgress := flags.GetBoolFlag(flags.NoProgressFlag) + if !term.IsTerminal(int(os.Stdin.Fd())) { + noProgress = 
true + } + proBarBuilder := progress.InitializePbBuilder( - flags.GetBoolFlag(flags.NoProgressFlag), + noProgress, flags.GetBoolFlag(flags.CIFlag), flags.GetBoolFlag(flags.SilentFlag)) @@ -504,38 +584,21 @@ func scan(changedDefaultQueryPath, changedDefaultLibrariesPath bool) error { return err } - err = preparePaths(changedDefaultQueryPath, changedDefaultLibrariesPath) - if err != nil { - return err - } - - extractedPaths, err := provider.GetSources(flags.GetMultiStrFlag(flags.PathFlag)) + store := storage.NewMemoryStorage() + extractedPaths, err := prepareAndAnalyzePaths(changedDefaultQueryPath, changedDefaultLibrariesPath) if err != nil { + log.Err(err) return err } - newTypeFlagValue, newExcludePathsFlagValue, errAnalyze := - analyzePaths(extractedPaths.Path, flags.GetMultiStrFlag(flags.TypeFlag), flags.GetMultiStrFlag(flags.ExcludePathsFlag)) - if errAnalyze != nil { - return errAnalyze - } - flags.SetMultiStrFlag(flags.TypeFlag, newTypeFlagValue) - flags.SetMultiStrFlag(flags.ExcludePathsFlag, newExcludePathsFlagValue) - - querySource := source.NewFilesystemSource( - flags.GetStrFlag(flags.QueriesPath), - flags.GetMultiStrFlag(flags.TypeFlag), - flags.GetMultiStrFlag(flags.CloudProviderFlag), - flags.GetStrFlag(flags.LibrariesPath)) - store := storage.NewMemoryStorage() - - inspector, err := createServiceAndStartScan(&startServiceParameters{ + excludeResultsMap := getExcludeResultsMap(flags.GetMultiStrFlag(flags.ExcludeResultsFlag)) + failedQueries, err := createServiceAndStartScan(&startServiceParameters{ t: t, store: store, - querySource: querySource, progressBar: progressBar, extractedPaths: extractedPaths.Path, pbBuilder: proBarBuilder, + excludeResults: excludeResultsMap, }) if err != nil { return err @@ -558,8 +621,12 @@ func scan(changedDefaultQueryPath, changedDefaultLibrariesPath bool) error { PathExtractionMap: extractedPaths.ExtractionMap, }) - if err := resolveOutputs(&summary, files.Combine(flags.GetBoolFlag(flags.LineInfoPayloadFlag)), - 
inspector.GetFailedQueries(), printer, *proBarBuilder); err != nil { + if err := resolveOutputs( + &summary, + files.Combine(flags.GetBoolFlag(flags.LineInfoPayloadFlag)), + failedQueries, + printer, + *proBarBuilder); err != nil { log.Err(err) return err } diff --git a/internal/storage/memory.go b/internal/storage/memory.go index 1ee029b8a88..cd7a6bfe7c6 100644 --- a/internal/storage/memory.go +++ b/internal/storage/memory.go @@ -2,6 +2,7 @@ package storage import ( "context" + "fmt" "github.com/Checkmarx/kics/pkg/model" "github.com/rs/zerolog/log" @@ -32,7 +33,31 @@ func (m *MemoryStorage) SaveVulnerabilities(_ context.Context, vulnerabilities [ // GetVulnerabilities returns a collection of vulnerabilities saved on MemoryStorage func (m *MemoryStorage) GetVulnerabilities(_ context.Context, _ string) ([]model.Vulnerability, error) { - return m.vulnerabilities, nil + return m.getUniqueVulnerabilities(), nil +} + +func (m *MemoryStorage) getUniqueVulnerabilities() []model.Vulnerability { + vulnDictionary := make(map[string]model.Vulnerability) + for i := range m.vulnerabilities { + key := fmt.Sprintf("%s:%s:%d:%s:%s:%s", + m.vulnerabilities[i].QueryID, + m.vulnerabilities[i].FileName, + m.vulnerabilities[i].Line, + m.vulnerabilities[i].SimilarityID, + m.vulnerabilities[i].SearchKey, + m.vulnerabilities[i].KeyActualValue, + ) + vulnDictionary[key] = m.vulnerabilities[i] + } + + var uniqueVulnerabilities []model.Vulnerability + for key := range vulnDictionary { + uniqueVulnerabilities = append(uniqueVulnerabilities, vulnDictionary[key]) + } + if len(uniqueVulnerabilities) == 0 { + return m.vulnerabilities + } + return uniqueVulnerabilities } // GetScanSummary is not supported by MemoryStorage diff --git a/internal/storage/memory_test.go b/internal/storage/memory_test.go index ae704029168..23e05ee6629 100644 --- a/internal/storage/memory_test.go +++ b/internal/storage/memory_test.go @@ -39,7 +39,7 @@ func TestMemoryStorage_SaveFile(t *testing.T) { ID: "id", ScanID: 
"scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", }, }, wantErr: false, @@ -105,7 +105,7 @@ func TestMemoryStorage(t *testing.T) { // nolint ID: "id", ScanID: "scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", }, }, }, @@ -121,7 +121,7 @@ func TestMemoryStorage(t *testing.T) { // nolint ID: "id", ScanID: "scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", }, }, vulnerabilities: []model.Vulnerability{ diff --git a/internal/tracker/ci.go b/internal/tracker/ci.go index 7c1a1fc7d44..c6f573b7939 100644 --- a/internal/tracker/ci.go +++ b/internal/tracker/ci.go @@ -9,12 +9,14 @@ import ( // CITracker contains information of how many queries were loaded and executed // and how many files were found and executed type CITracker struct { - LoadedQueries int ExecutingQueries int ExecutedQueries int FoundFiles int - ParsedFiles int FailedSimilarityID int + LoadedQueries int + ParsedFiles int + ScanSecrets int + ScanPaths int lines int } @@ -69,3 +71,11 @@ func (c *CITracker) FailedDetectLine() { func (c *CITracker) FailedComputeSimilarityID() { c.FailedSimilarityID++ } + +func (c *CITracker) TrackScanSecret() { + c.ScanSecrets++ +} + +func (c *CITracker) TrackScanPath() { + c.ScanPaths++ +} diff --git a/pkg/detector/helm/helm_detect_test.go b/pkg/detector/helm/helm_detect_test.go index c33c29544f9..31c8064a31d 100644 --- a/pkg/detector/helm/helm_detect_test.go +++ b/pkg/detector/helm/helm_detect_test.go @@ -29,7 +29,7 @@ func TestEngine_detectHelmLine(t *testing.T) { //nolint ScanID: "console", Document: model.Document{}, Kind: model.KindHELM, - FileName: "test-connection.yaml", + FilePath: "test-connection.yaml", HelmID: "# KICS_HELM_ID_0", OriginalData: `# KICS_HELM_ID_0: apiVersion: v1 @@ -73,7 +73,7 @@ spec: ScanID: "console", Document: model.Document{}, Kind: model.KindHELM, - FileName: "test-dup_values.yaml", + FilePath: "test-dup_values.yaml", IDInfo: 
map[int]interface{}{0: map[int]int{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11, 12: 12, 13: 13, 14: 14, 15: 15, 16: 16, 17: 17, 18: 18, 19: 19, 21: 21, 22: 22}}, @@ -126,7 +126,7 @@ spec: ScanID: "console", Document: model.Document{}, Kind: model.KindHELM, - FileName: "test-dups.yaml", + FilePath: "test-dups.yaml", HelmID: "# KICS_HELM_ID_1", OriginalData: `# KICS_HELM_ID_0: apiVersion: v1 diff --git a/pkg/engine/inspector.go b/pkg/engine/inspector.go index 69249afade7..3efa76edcb4 100644 --- a/pkg/engine/inspector.go +++ b/pkg/engine/inspector.go @@ -46,20 +46,6 @@ var ErrInvalidResult = errors.New("query: invalid result format") type VulnerabilityBuilder func(ctx *QueryContext, tracker Tracker, v interface{}, detector *detector.DetectLine) (model.Vulnerability, error) -// Tracker wraps an interface that contain basic methods: TrackQueryLoad, TrackQueryExecution and FailedDetectLine -// TrackQueryLoad increments the number of loaded queries -// TrackQueryExecution increments the number of queries executed -// FailedDetectLine decrements the number of queries executed -// GetOutputLines returns the number of lines to be displayed in results outputs -type Tracker interface { - TrackQueryLoad(queryAggregation int) - TrackQueryExecuting(queryAggregation int) - TrackQueryExecution(queryAggregation int) - FailedDetectLine() - FailedComputeSimilarityID() - GetOutputLines() int -} - type preparedQuery struct { opaQuery rego.PreparedEvalQuery metadata model.QueryMetadata @@ -115,7 +101,7 @@ func NewInspector( return nil, errors.Wrap(err, "failed to get queries") } - commonGeneralQuery, err := queriesSource.GetQueryLibrary("common") + commonLibrary, err := queriesSource.GetQueryLibrary("common") if err != nil { sentry.CaptureException(err) log.Err(err). 
@@ -139,10 +125,10 @@ func NewInspector( var opaQuery rego.PreparedEvalQuery store := inmem.NewFromReader(bytes.NewBufferString(metadata.InputData)) - if commonGeneralQuery != "" && platformGeneralQuery != "" { + if commonLibrary != "" && platformGeneralQuery != "" { opaQuery, err = rego.New( rego.Query(regoQuery), - rego.Module("Common", commonGeneralQuery), + rego.Module("Common", commonLibrary), rego.Module("Generic", platformGeneralQuery), rego.Module(metadata.Query, metadata.Content), rego.Store(store), @@ -183,7 +169,7 @@ func NewInspector( log.Info(). Msgf("Inspector initialized, number of queries=%d", queriesNumber) - lineDetctor := detector.NewDetectLine(tracker.GetOutputLines()). + lineDetector := detector.NewDetectLine(tracker.GetOutputLines()). Add(helm.DetectKindLine{}, model.KindHELM). Add(docker.DetectKindLine{}, model.KindDOCKER) @@ -196,7 +182,7 @@ func NewInspector( tracker: tracker, failedQueries: failedQueries, excludeResults: excludeResults, - detector: lineDetctor, + detector: lineDetector, queryExecTimeout: queryExecTimeout, }, nil } @@ -384,7 +370,7 @@ func (c *Inspector) decodeQueryResults(ctx *QueryContext, results rego.ResultSet } file := ctx.files[vulnerability.FileID] if shouldSkipFile(file.Commands, vulnerability.QueryID) { - log.Debug().Msgf("Skipping file %s for query %s", file.FileName, ctx.query.metadata.Query) + log.Debug().Msgf("Skipping file %s for query %s", file.FilePath, ctx.query.metadata.Query) continue } diff --git a/pkg/engine/inspector_test.go b/pkg/engine/inspector_test.go index bbb237b760b..046e9814759 100644 --- a/pkg/engine/inspector_test.go +++ b/pkg/engine/inspector_test.go @@ -3,6 +3,7 @@ package engine import ( "context" "fmt" + "io" "os" "path/filepath" "reflect" @@ -20,6 +21,7 @@ import ( "github.com/Checkmarx/kics/pkg/model" "github.com/Checkmarx/kics/pkg/progress" "github.com/Checkmarx/kics/test" + "github.com/rs/zerolog" "github.com/rs/zerolog/log" "github.com/stretchr/testify/require" @@ -29,6 +31,8 @@ 
import ( // TestInspector_EnableCoverageReport tests the functions [EnableCoverageReport()] and all the methods called by them func TestInspector_EnableCoverageReport(t *testing.T) { + log.Logger = log.Output(zerolog.ConsoleWriter{Out: io.Discard}) + type fields struct { queries []*preparedQuery vb VulnerabilityBuilder @@ -237,7 +241,7 @@ func TestInspect(t *testing.T) { //nolint Document: mockedFileMetadataDocument, OriginalData: "orig_data", Kind: "DOCKERFILE", - FileName: "assets/queries/dockerfile/add_instead_of_copy/test/positive.dockerfile", + FilePath: "assets/queries/dockerfile/add_instead_of_copy/test/positive.dockerfile", }, }, }, @@ -287,7 +291,7 @@ func TestInspect(t *testing.T) { //nolint Document: mockedFileMetadataDocument, OriginalData: "orig_data", Kind: "DOCKERFILE", - FileName: "assets/queries/dockerfile/add_instead_of_copy/test/positive.dockerfile", + FilePath: "assets/queries/dockerfile/add_instead_of_copy/test/positive.dockerfile", }, }, }, @@ -525,12 +529,12 @@ func TestEngine_LenQueriesByPlat(t *testing.T) { min: 100, }, { - name: "test_len_queries_plat_common", + name: "test_len_queries_plat_dockerfile", args: args{ queriesPath: filepath.FromSlash("./assets/queries"), - platform: []string{"common"}, + platform: []string{"dockerfile"}, }, - min: 0, + min: 50, }, } diff --git a/pkg/engine/secrets/inspector.go b/pkg/engine/secrets/inspector.go new file mode 100644 index 00000000000..9a30408901b --- /dev/null +++ b/pkg/engine/secrets/inspector.go @@ -0,0 +1,458 @@ +package secrets + +import ( + "context" + _ "embed" // Embed KICS regex rules + "encoding/json" + "fmt" + "math" + "regexp" + "strings" + "time" + + "github.com/Checkmarx/kics/assets" + "github.com/Checkmarx/kics/pkg/detector" + "github.com/Checkmarx/kics/pkg/detector/docker" + "github.com/Checkmarx/kics/pkg/detector/helm" + engine "github.com/Checkmarx/kics/pkg/engine" + "github.com/Checkmarx/kics/pkg/engine/similarity" + "github.com/Checkmarx/kics/pkg/engine/source" + 
"github.com/Checkmarx/kics/pkg/model" + "github.com/rs/zerolog/log" +) + +const ( + Base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + HexChars = "1234567890abcdefABCDEF" +) + +var ( + SecretsQueryMetadata map[string]string +) + +type Inspector struct { + ctx context.Context + tracker engine.Tracker + detector *detector.DetectLine + excludeResults map[string]bool + regexQueries []RegexQuery + allowRules []AllowRule + vulnerabilities []model.Vulnerability + queryExecutionTimeout time.Duration + foundLines []int +} + +type Entropy struct { + Group int `json:"group"` + Min float64 `json:"min"` + Max float64 `json:"max"` +} + +type MultilineResult struct { + DetectLineGroup int `json:"detectLineGroup"` +} + +type AllowRule struct { + Description string `json:"description"` + RegexStr string `json:"regex"` + Regex *regexp.Regexp +} + +type RegexQuery struct { + ID string `json:"id"` + Name string `json:"name"` + Multiline MultilineResult `json:"multiline"` + RegexStr string `json:"regex"` + Entropies []Entropy `json:"entropies"` + AllowRules []AllowRule `json:"allowRules"` + Regex *regexp.Regexp +} + +type RegexRuleStruct struct { + Rules []RegexQuery `json:"rules"` + AllowRules []AllowRule `json:"allowRules"` +} + +type RuleMatch struct { + File string + RuleName string + Matches []string + Line int + Entropy float64 +} + +type lineVulneInfo struct { + lineContent string + lineNumber int + groups []string +} + +func NewInspector( + ctx context.Context, + excludeResults map[string]bool, + tracker engine.Tracker, + queryFilter *source.QueryInspectorParameters, + disableSecretsQuery bool, + executionTimeout int, + regexRulesContent string, +) (*Inspector, error) { + if disableSecretsQuery { + return &Inspector{ + ctx: ctx, + tracker: tracker, + excludeResults: excludeResults, + regexQueries: make([]RegexQuery, 0), + allowRules: make([]AllowRule, 0), + vulnerabilities: make([]model.Vulnerability, 0), + queryExecutionTimeout: 
time.Duration(executionTimeout) * time.Second, + }, nil + } + + lineDetector := detector.NewDetectLine(tracker.GetOutputLines()). + Add(helm.DetectKindLine{}, model.KindHELM). + Add(docker.DetectKindLine{}, model.KindDOCKER) + + err := json.Unmarshal([]byte(assets.SecretsQueryMetadataJSON), &SecretsQueryMetadata) + if err != nil { + return nil, err + } + queryExecutionTimeout := time.Duration(executionTimeout) * time.Second + + var allRegexQueries RegexRuleStruct + err = json.Unmarshal([]byte(regexRulesContent), &allRegexQueries) + if err != nil { + return nil, err + } + + return &Inspector{ + ctx: ctx, + detector: lineDetector, + excludeResults: excludeResults, + tracker: tracker, + regexQueries: compileRegexQueries(queryFilter, allRegexQueries.Rules), + allowRules: compileRegex(allRegexQueries.AllowRules), + vulnerabilities: make([]model.Vulnerability, 0), + queryExecutionTimeout: queryExecutionTimeout, + foundLines: make([]int, 0), + }, nil +} + +func (c *Inspector) Inspect(ctx context.Context, basePaths []string, + files model.FileMetadatas, currentQuery chan<- int64) ([]model.Vulnerability, error) { + for i := range c.regexQueries { + currentQuery <- 1 + + timeoutCtx, cancel := context.WithTimeout(ctx, c.queryExecutionTimeout*time.Second) + defer cancel() + for idx := range files { + select { + case <-timeoutCtx.Done(): + return c.vulnerabilities, timeoutCtx.Err() + default: + // check file content line by line + if c.regexQueries[i].Multiline == (MultilineResult{}) { + lines := c.detector.SplitLines(&files[idx]) + + for lineNumber, currentLine := range lines { + c.checkLineByLine(&c.regexQueries[i], basePaths, &files[idx], lineNumber, currentLine) + } + continue + } + + // check file content as a whole + c.checkFileContent(&c.regexQueries[i], basePaths, &files[idx]) + } + } + } + return c.vulnerabilities, nil +} + +func compileRegexQueries(queryFilter *source.QueryInspectorParameters, allRegexQueries []RegexQuery) []RegexQuery { + var regexQueries 
[]RegexQuery + + for i := range allRegexQueries { + if len(queryFilter.IncludeQueries.ByIDs) > 0 { + if isValueInArray(allRegexQueries[i].ID, queryFilter.IncludeQueries.ByIDs) { + regexQueries = append(regexQueries, allRegexQueries[i]) + } + } else { + if isValueInArray(allRegexQueries[i].ID, queryFilter.ExcludeQueries.ByIDs) { + log.Debug(). + Msgf("Excluding query ID: %s category: %s severity: %s", + allRegexQueries[i].ID, + SecretsQueryMetadata["category"], + SecretsQueryMetadata["severity"]) + continue + } + regexQueries = append(regexQueries, allRegexQueries[i]) + } + } + for i := range regexQueries { + regexQueries[i].Regex = regexp.MustCompile(regexQueries[i].RegexStr) + for j := range regexQueries[i].AllowRules { + regexQueries[i].AllowRules[j].Regex = regexp.MustCompile(regexQueries[i].AllowRules[j].RegexStr) + } + } + return regexQueries +} + +func compileRegex(allowRules []AllowRule) []AllowRule { + for j := range allowRules { + allowRules[j].Regex = regexp.MustCompile(allowRules[j].RegexStr) + } + return allowRules +} + +func (c *Inspector) GetQueriesLength() int { + return len(c.regexQueries) +} + +func isValueInArray(value string, array []string) bool { + for i := range array { + if value == array[i] { + return true + } + } + return false +} + +func (c *Inspector) isSecret(s string, query *RegexQuery) (isSecretRet bool, groups [][]string) { + if isAllowRule(s, query.AllowRules) || isAllowRule(s, c.allowRules) { + return false, [][]string{} + } + + groups = query.Regex.FindAllStringSubmatch(s, -1) + + for _, group := range groups { + splitedText := strings.Split(s, "\n") + max := -1 + for i, splited := range splitedText { + if len(groups) < query.Multiline.DetectLineGroup { + if strings.Contains(splited, group[query.Multiline.DetectLineGroup]) && i > max { + max = i + } + } + } + if max == -1 { + continue + } + secret, newGroups := c.isSecret(strings.Join(append(splitedText[:max], splitedText[max+1:]...), "\n"), query) + if !secret { + continue + } + 
groups = append(groups, newGroups...) + } + + if len(groups) > 0 { + return true, groups + } + return false, [][]string{} +} + +func isAllowRule(s string, allowRules []AllowRule) bool { + for i := range allowRules { + if allowRules[i].Regex.MatchString(s) { + return true + } + } + return false +} + +func (c *Inspector) checkFileContent(query *RegexQuery, basePaths []string, file *model.FileMetadata) { + isSecret, groups := c.isSecret(file.OriginalData, query) + if !isSecret { + return + } + + lineVulns := c.secretsDetectLine(query, file, groups) + + for _, lineVuln := range lineVulns { + if len(query.Entropies) == 0 { + c.addVulnerability( + basePaths, + file, + query, + lineVuln.lineNumber, + lineVuln.lineContent, + ) + } + + if len(lineVuln.groups) > 0 { + for _, entropy := range query.Entropies { + // if matched group does not exist continue + if len(lineVuln.groups) <= entropy.Group { + return + } + isMatch, entropyFloat := CheckEntropyInterval( + entropy, + lineVuln.groups[entropy.Group], + ) + log.Debug().Msgf("match: %v :: %v", isMatch, fmt.Sprint(entropyFloat)) + + if isMatch { + c.addVulnerability( + basePaths, + file, + query, + lineVuln.lineNumber, + lineVuln.lineContent, + ) + } + } + } + } +} + +func (c *Inspector) secretsDetectLine(query *RegexQuery, file *model.FileMetadata, vulnGroups [][]string) []lineVulneInfo { + content := file.OriginalData + lines := c.detector.SplitLines(file) + lineVulneInfoSlice := make([]lineVulneInfo, 0) + realLineUpdater := 0 + for _, groups := range vulnGroups { + lineVulneInfoObject := lineVulneInfo{ + lineNumber: -1, + lineContent: "-", + groups: groups, + } + + if len(groups) <= query.Multiline.DetectLineGroup { + log.Warn().Msgf("Unable to detect line in file %v Multiline group not found: %v", file.FilePath, query.Multiline.DetectLineGroup) + lineVulneInfoSlice = append(lineVulneInfoSlice, lineVulneInfoObject) + continue + } + + contentMatchRemoved := strings.Replace(content, groups[query.Multiline.DetectLineGroup], 
"", 1) + + text := strings.ReplaceAll(contentMatchRemoved, "\r", "") + contentMatchRemovedLines := strings.Split(text, "\n") + for i := 0; i < len(lines); i++ { + if lines[i] != contentMatchRemovedLines[i] { + lineVulneInfoObject.lineNumber = i + realLineUpdater + lineVulneInfoObject.lineContent = lines[i] + break + } + } + + realLineUpdater += len(lines) - len(contentMatchRemovedLines) + content = contentMatchRemoved + lines = contentMatchRemovedLines + + lineVulneInfoSlice = append(lineVulneInfoSlice, lineVulneInfoObject) + } + + return lineVulneInfoSlice +} + +func (c *Inspector) checkLineByLine(query *RegexQuery, basePaths []string, file *model.FileMetadata, lineNumber int, currentLine string) { + isSecret, groups := c.isSecret(currentLine, query) + if !isSecret { + return + } + + if len(query.Entropies) == 0 { + c.addVulnerability( + basePaths, + file, + query, + lineNumber, + currentLine, + ) + } + + for i := range query.Entropies { + entropy := query.Entropies[i] + + // if matched group does not exist continue + if len(groups[0]) <= entropy.Group { + return + } + + isMatch, entropyFloat := CheckEntropyInterval( + entropy, + groups[0][entropy.Group], + ) + log.Debug().Msgf("match: %v :: %v", isMatch, fmt.Sprint(entropyFloat)) + + if isMatch { + c.addVulnerability( + basePaths, + file, + query, + lineNumber, + currentLine, + ) + } + } +} + +func (c *Inspector) addVulnerability(basePaths []string, file *model.FileMetadata, query *RegexQuery, lineNumber int, issueLine string) { + simID, err := similarity.ComputeSimilarityID( + basePaths, + file.FilePath, + query.ID, + fmt.Sprintf("%d", lineNumber), + "", + ) + if err != nil { + log.Error().Msg("unable to compute similarity ID") + } + + if _, ok := c.excludeResults[engine.PtrStringToString(simID)]; !ok { + linesVuln := c.detector.GetAdjecent(file, lineNumber+1) + vuln := model.Vulnerability{ + QueryID: query.ID, + QueryName: SecretsQueryMetadata["queryName"] + " - " + query.Name, + SimilarityID: 
engine.PtrStringToString(simID), + FileID: file.ID, + FileName: file.FilePath, + Line: linesVuln.Line, + VulnLines: linesVuln.VulnLines, + IssueType: "RedundantAttribute", + Platform: SecretsQueryMetadata["platform"], + Severity: model.SeverityHigh, + QueryURI: SecretsQueryMetadata["descriptionUrl"], + Category: SecretsQueryMetadata["category"], + Description: SecretsQueryMetadata["descriptionText"], + DescriptionID: SecretsQueryMetadata["descriptionID"], + KeyExpectedValue: "Hardcoded secret key should not appear in source", + KeyActualValue: fmt.Sprintf("'%s' contains a secret", issueLine), + } + c.vulnerabilities = append(c.vulnerabilities, vuln) + } +} + +// CheckEntropyInterval - verifies if a given token's entropy is within expected bounds +func CheckEntropyInterval(entropy Entropy, token string) (isEntropyInInterval bool, entropyLevel float64) { + base64Entropy := calculateEntropy(token, Base64Chars) + hexEntropy := calculateEntropy(token, HexChars) + highestEntropy := math.Max(base64Entropy, hexEntropy) + if insideInterval(entropy, base64Entropy) || insideInterval(entropy, hexEntropy) { + return true, highestEntropy + } + return false, highestEntropy +} + +func insideInterval(entropy Entropy, floatEntropy float64) bool { + return floatEntropy >= entropy.Min && floatEntropy <= entropy.Max +} + +// calculateEntropy - calculates the entropy of a string based on the Shannon formula +func calculateEntropy(token, charSet string) float64 { + if token == "" { + return 0 + } + charMap := map[rune]float64{} + for _, char := range token { + if strings.Contains(charSet, string(char)) { + charMap[char]++ + } + } + + var freq float64 + length := float64(len(token)) + for _, count := range charMap { + freq += count * math.Log2(count) + } + + return math.Log2(length) - freq/length +} diff --git a/pkg/engine/secrets/inspector_test.go b/pkg/engine/secrets/inspector_test.go new file mode 100644 index 00000000000..123d028a586 --- /dev/null +++ 
b/pkg/engine/secrets/inspector_test.go @@ -0,0 +1,583 @@ +package secrets + +import ( + "context" + "path/filepath" + "sync" + "testing" + + "github.com/Checkmarx/kics/assets" + "github.com/Checkmarx/kics/internal/tracker" + "github.com/Checkmarx/kics/pkg/engine/source" + "github.com/Checkmarx/kics/pkg/model" + "github.com/Checkmarx/kics/pkg/progress" + "github.com/stretchr/testify/require" +) + +var testCompileRegexesInput = []struct { + name string + inspectorParams *source.QueryInspectorParameters + allRegexQueries []RegexQuery + wantIDs []string +}{ + { + name: "empty_query", + inspectorParams: &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{}, ByCategories: []string{}}, + InputDataPath: "", + }, + allRegexQueries: []RegexQuery{}, + wantIDs: []string{}, + }, + { + name: "one_query", + inspectorParams: &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{}, ByCategories: []string{}}, + InputDataPath: "", + }, + allRegexQueries: []RegexQuery{ + { + ID: "487f4be7-3fd9-4506-a07a-eae252180c08", + Name: "Generic Password", + RegexStr: `['|"]?[p|P][a|A][s|S][s|S][w|W][o|O][r|R][d|D]['|\"]?\s*[:|=]\s*['|"]?([A-Za-z0-9/~^_!@&%()=?*+-]{4,})['|"]?`, + }, + }, + wantIDs: []string{"487f4be7-3fd9-4506-a07a-eae252180c08"}, + }, + { + name: "three_queries", + inspectorParams: &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{}, ByCategories: []string{}}, + InputDataPath: "", + }, + allRegexQueries: []RegexQuery{ + { + ID: "487f4be7-3fd9-4506-a07a-eae252180c08", + Name: "Generic Password", + RegexStr: `['|"]?[p|P][a|A][s|S][s|S][w|W][o|O][r|R][d|D]['|\"]?\s*[:|=]\s*['|"]?([A-Za-z0-9/~^_!@&%()=?*+-]{4,})['|"]?`, + }, + { + ID: "4b2b5fd3-364d-4093-bac2-17391b2a5297", + Name: "K8s 
Environment Variable Password", + RegexStr: `apiVersion((.*)\s*)*env:((.*)\s*)*name:\s*\w+[P|p][A|a][S|s][S|s]([W|w][O|o][R|r][D|d])?\w*\s*(value):\s*(["|'].*["|'])`, + Multiline: MultilineResult{ + DetectLineGroup: 7, + }, + }, + { + ID: "c4d3b58a-e6d4-450f-9340-04f1e702eaae", + Name: "Password in URL", + RegexStr: `[a-zA-Z]{3,10}://[^/\s:@]*?:[^/\s:@]*?@[^/\s:@]*`, + }, + }, + wantIDs: []string{"487f4be7-3fd9-4506-a07a-eae252180c08", "4b2b5fd3-364d-4093-bac2-17391b2a5297", "c4d3b58a-e6d4-450f-9340-04f1e702eaae"}, + }, + { + name: "include_one", + inspectorParams: &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{"487f4be7-3fd9-4506-a07a-eae252180c08"}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{}, ByCategories: []string{}}, + InputDataPath: "", + }, + allRegexQueries: []RegexQuery{ + { + ID: "487f4be7-3fd9-4506-a07a-eae252180c08", + Name: "Generic Password", + RegexStr: `['|"]?[p|P][a|A][s|S][s|S][w|W][o|O][r|R][d|D]['|\"]?\s*[:|=]\s*['|"]?([A-Za-z0-9/~^_!@&%()=?*+-]{4,})['|"]?`, + }, + { + ID: "4b2b5fd3-364d-4093-bac2-17391b2a5297", + Name: "K8s Environment Variable Password", + RegexStr: `apiVersion((.*)\s*)*env:((.*)\s*)*name:\s*\w+[P|p][A|a][S|s][S|s]([W|w][O|o][R|r][D|d])?\w*\s*(value):\s*(["|'].*["|'])`, + Multiline: MultilineResult{ + DetectLineGroup: 7, + }, + }, + { + ID: "c4d3b58a-e6d4-450f-9340-04f1e702eaae", + Name: "Password in URL", + RegexStr: `[a-zA-Z]{3,10}://[^/\s:@]*?:[^/\s:@]*?@[^/\s:@]*`, + }, + }, + wantIDs: []string{"487f4be7-3fd9-4506-a07a-eae252180c08"}, + }, + { + name: "exclude_one", + inspectorParams: &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{"c4d3b58a-e6d4-450f-9340-04f1e702eaae"}, ByCategories: []string{}}, + InputDataPath: "", + }, + allRegexQueries: []RegexQuery{ + { + ID: "487f4be7-3fd9-4506-a07a-eae252180c08", + Name: "Generic Password", + RegexStr: 
`['|"]?[p|P][a|A][s|S][s|S][w|W][o|O][r|R][d|D]['|\"]?\s*[:|=]\s*['|"]?([A-Za-z0-9/~^_!@&%()=?*+-]{4,})['|"]?`, + }, + { + ID: "4b2b5fd3-364d-4093-bac2-17391b2a5297", + Name: "K8s Environment Variable Password", + RegexStr: `apiVersion((.*)\s*)*env:((.*)\s*)*name:\s*\w+[P|p][A|a][S|s][S|s]([W|w][O|o][R|r][D|d])?\w*\s*(value):\s*(["|'].*["|'])`, + Multiline: MultilineResult{ + DetectLineGroup: 7, + }, + }, + { + ID: "c4d3b58a-e6d4-450f-9340-04f1e702eaae", + Name: "Password in URL", + RegexStr: `[a-zA-Z]{3,10}://[^/\s:@]*?:[^/\s:@]*?@[^/\s:@]*`, + }, + }, + wantIDs: []string{"487f4be7-3fd9-4506-a07a-eae252180c08", "4b2b5fd3-364d-4093-bac2-17391b2a5297"}, + }, +} + +var testInspectInput = []struct { + name string + files model.FileMetadatas + wantVuln []model.Vulnerability + wantErr bool +}{ + { + name: "valid_no_results", + files: model.FileMetadatas{ + { + ID: "853012ab-cc05-4c1c-b517-9c3552085ee8", + Document: model.Document{}, + OriginalData: ` +resource "google_container_cluster" "primary3" { +name = "marcellus-wallace" +location = "us-central1-a" +initial_node_count = 3 + +master_auth { + username = "1234567890qwertyuiopasdfghjklçzxcvbnm" + password = "" + + client_certificate_config { + issue_client_certificate = true + } +} +}`, + Kind: "TF", + FilePath: "assets/queries/common/passwords_and_secrets/test/negative7.tf", + }, + }, + wantVuln: []model.Vulnerability{}, + wantErr: false, + }, + { + name: "valid_one_result", + files: model.FileMetadatas{ + { + ID: "b032c51d-2e7c-4ffc-8a81-41405c166bc8", + Document: model.Document{}, + OriginalData: ` +apiVersion: v1 +kind: Secret +metadata: +name: secret-basic-auth +type: kubernetes.io/basic-auth +stringData: +password: "root"`, + Kind: "K8S", + FilePath: "assets/queries/common/passwords_and_secrets/test/positive1.yaml", + }, + }, + wantVuln: []model.Vulnerability{ + { + QueryID: "487f4be7-3fd9-4506-a07a-eae252180c08", + QueryName: "Passwords And Secrets - Generic Password", + Severity: model.SeverityHigh, + 
Category: "Secret Management", + Description: "Query to find passwords and secrets in infrastructure code.", + }, + }, + wantErr: false, + }, + { + name: "valid_one_multiline_result", + files: model.FileMetadatas{ + { + ID: "d274e272-a4af-497e-a900-a277500e4182", + Document: model.Document{}, + OriginalData: ` +resource "aws_transfer_ssh_key" "example" { +server_id = aws_transfer_server.example.id +user_name = aws_transfer_user.example.user_name +body = <= 0 { - s.similarityIDLineInfo = strconv.Itoa(s.lineNr) - s.linesVulne = s.detector.GetAdjecent(&s.file, s.lineNr) - } - } -} diff --git a/pkg/engine/vulnerability_builder_test.go b/pkg/engine/vulnerability_builder_test.go index ea8962fc2cc..7462f65e592 100644 --- a/pkg/engine/vulnerability_builder_test.go +++ b/pkg/engine/vulnerability_builder_test.go @@ -2,7 +2,6 @@ package engine import ( "encoding/json" - "fmt" "reflect" "testing" @@ -12,169 +11,6 @@ import ( "github.com/stretchr/testify/require" ) -// TestMapKeyToString tests the functions [MapKeyToString()] and all the methods called by them -func TestMapKeyToString(t *testing.T) { - testCases := []struct { - payload interface{} - expected string - }{ - { - payload: "test", - expected: "test", - }, - { - payload: 123, - expected: "123", - }, - { - payload: 0.123, - expected: "0.123", - }, - { - payload: false, - expected: "false", - }, - { - payload: nil, - expected: "null", - }, - } - - for i, testCase := range testCases { - t.Run(fmt.Sprintf("mapKeyToString-%d", i), func(t *testing.T) { - v, err := mapKeyToString(map[string]interface{}{"key": testCase.payload}, "key", false) - require.Nil(t, err) - require.Equal(t, testCase.expected, *v) - }) - } - for i, testCase := range testCases { - t.Run(fmt.Sprintf("mapKeyToString-%d", i), func(t *testing.T) { - _, err := mapKeyToString(map[string]interface{}{"t": testCase.payload}, "key", false) - require.Error(t, err) - }) - } -} - -// Test_mergeWithMetadata tests the functions [mergeWithMetadata()] and all the 
methods called by them -func Test_mergeWithMetadata(t *testing.T) { - type args struct { - base map[string]interface{} - additional map[string]interface{} - } - tests := []struct { - name string - args args - want map[string]interface{} - }{ - { - name: "mergeWithMetadata", - args: args{ - base: map[string]interface{}{ - "key": "123", - }, - additional: map[string]interface{}{ - "key": "teste", - }, - }, - want: map[string]interface{}{ - "key": "123", - }, - }, - { - name: "mergeWithMetadata_2", - args: args{ - base: map[string]interface{}{ - "key": "123", - }, - additional: map[string]interface{}{ - "r": "teste2", - }, - }, - want: map[string]interface{}{ - "key": "123", - "r": "teste2", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := mergeWithMetadata(tt.args.base, tt.args.additional); !reflect.DeepEqual(got, tt.want) { - t.Errorf("mergeWithMetadata() = %v, want %v", got, tt.want) - } - }) - } -} - -// Test_mustMapKeyToString tests the functions [mustMapKeyToString()] and all the methods called by them -func Test_mustMapKeyToString(t *testing.T) { - type args struct { - m map[string]interface{} - key string - } - tests := []struct { - name string - args args - want string - }{ - { - name: "mustMapKeyToString", - args: args{ - m: map[string]interface{}{ - "key": 123, - }, - key: "key", - }, - want: "123", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := mustMapKeyToString(tt.args.m, tt.args.key) - require.Equal(t, tt.want, *got) - }) - } -} - -// Test_ptrStringToString tests the functions [ptrStringToString()] and all the methods called by them -func Test_ptrStringToString(t *testing.T) { - type args struct { - v string - } - tests := []struct { - name string - args args - want string - }{ - { - name: "ptrStringToString", - args: args{ - v: "123", - }, - want: "123", - }, - { - name: "ptrStringToString_empty", - args: args{ - v: "nil", - }, - want: "", - }, - } - for _, tt := range 
tests { - t.Run(tt.name, func(t *testing.T) { - if tt.args.v == "nil" { - if got := ptrStringToString(nil); got != tt.want { - t.Errorf("ptrStringToString() = %v, want %v", got, tt.want) - } - } else { - if got := ptrStringToString(&tt.args.v); got != tt.want { - t.Errorf("ptrStringToString() = %v, want %v", got, tt.want) - } - } - }) - } -} - type vbArgs struct { ctx *QueryContext v interface{} @@ -353,7 +189,7 @@ func TestDefaultVulnerabilityBuilder(t *testing.T) { t.Run(tt.name, func(t *testing.T) { got, err := DefaultVulnerabilityBuilder(tt.args.ctx, tt.args.tracker, tt.args.v, insDetector) if (err != nil) != tt.wantErr { - t.Errorf("DefaultVulnerabilityBuilder() error %v, wantErr %v", err, tt.wantErr) + t.Errorf("test[%s] DefaultVulnerabilityBuilder() error %v, wantErr %v", tt.name, err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { @@ -362,7 +198,7 @@ func TestDefaultVulnerabilityBuilder(t *testing.T) { require.NoError(t, err) wantJSON, err = json.Marshal(tt.want) require.NoError(t, err) - t.Errorf("DefaultVulnerabilityBuilder() got = %v,\n want = %v", string(gotJSON), string(wantJSON)) + t.Errorf("test[%s] DefaultVulnerabilityBuilder() got = %v,\n want = %v", tt.name, string(gotJSON), string(wantJSON)) } }) } diff --git a/pkg/engine/vulnerability_utils.go b/pkg/engine/vulnerability_utils.go new file mode 100644 index 00000000000..3744da22243 --- /dev/null +++ b/pkg/engine/vulnerability_utils.go @@ -0,0 +1,151 @@ +package engine + +import ( + "encoding/json" + "fmt" + "strconv" + + dec "github.com/Checkmarx/kics/pkg/detector" + "github.com/Checkmarx/kics/pkg/model" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" +) + +type searchLineCalculator struct { + lineNr int + vObj map[string]interface{} + file model.FileMetadata + detector *dec.DetectLine + similarityIDLineInfo string + linesVulne model.VulnerabilityLines +} + +func (s *searchLineCalculator) calculate() { + if searchLine, ok := s.vObj["searchLine"]; ok { + line := make([]string, 
0, len(searchLine.([]interface{}))) + for _, strElement := range searchLine.([]interface{}) { + line = append(line, strElement.(string)) + } + var err error + s.lineNr, err = dec.GetLineBySearchLine(line, &s.file) + if err != nil { + log.Error().Msgf("failed to get line information from searchLine, using searchKey") + } + if s.lineNr >= 0 { + s.similarityIDLineInfo = strconv.Itoa(s.lineNr) + s.linesVulne = s.detector.GetAdjecent(&s.file, s.lineNr) + } + } +} + +func mergeWithMetadata(base, additional map[string]interface{}) map[string]interface{} { + for k, v := range additional { + if _, ok := base[k]; ok { + continue + } + + base[k] = v + } + + return base +} + +func mustMapKeyToString(m map[string]interface{}, key string) *string { + res, err := mapKeyToString(m, key, true) + if err != nil && key != "value" { + log.Warn(). + Str("reason", err.Error()). + Msgf("Failed to get key %s in map", key) + } + + return res +} + +func mapKeyToString(m map[string]interface{}, key string, allowNil bool) (*string, error) { + v, ok := m[key] + if !ok { + return nil, fmt.Errorf("key '%s' not found in map", key) + } + + switch vv := v.(type) { + case json.Number: + return stringToPtrString(vv.String()), nil + case string: + return stringToPtrString(vv), nil + case int, int32, int64: + return stringToPtrString(fmt.Sprintf("%d", vv)), nil + case float32: + return stringToPtrString(strconv.FormatFloat(float64(vv), 'f', -1, formatFloat64)), nil + case float64: + return stringToPtrString(strconv.FormatFloat(vv, 'f', -1, formatFloat64)), nil + case nil: + if allowNil { + return nil, nil + } + return stringToPtrString("null"), nil + case bool: + return stringToPtrString(fmt.Sprintf("%v", vv)), nil + } + + log.Debug(). + Msg("Detecting line. 
can't format item to string") + + if allowNil { + return nil, nil + } + + return stringToPtrString(""), nil +} + +func stringToPtrString(v string) *string { + return &v +} + +// PtrStringToString - converts a pointer to string to a string +func PtrStringToString(v *string) string { + if v == nil { + return "" + } + return *v +} + +func tryOverride(overrideKey, vulnParam string, vObj map[string]interface{}) *string { + if overrideKey != "" { + if override, ok := vObj["override"].(map[string]interface{}); ok { + if overrideObject, ok := override[overrideKey].(map[string]interface{}); ok { + if _, ok := overrideObject[vulnParam]; ok { + overrideValue, err := mapKeyToString(overrideObject, vulnParam, true) + if err != nil { + return nil + } else if overrideValue != nil { + return overrideValue + } + } + } + } + } + return nil +} + +func getStringFromMap(vulnParam, defaultParam, overrideKey string, vObj map[string]interface{}, logWithFields *zerolog.Logger) string { + ts, err := mapKeyToString(vObj, vulnParam, false) + if err != nil { + logWithFields.Err(err). + Msgf("Saving result. 
failed to detect %s", vulnParam) + return defaultParam + } + overrideValue := tryOverride(overrideKey, vulnParam, vObj) + if overrideValue != nil { + ts = overrideValue + } + return *ts +} + +func getSeverity(severity string) model.Severity { + for _, si := range model.AllSeverities { + if severity == string(si) { + return si + } + } + return "" +} diff --git a/pkg/engine/vulnerability_utils_test.go b/pkg/engine/vulnerability_utils_test.go new file mode 100644 index 00000000000..67424057818 --- /dev/null +++ b/pkg/engine/vulnerability_utils_test.go @@ -0,0 +1,172 @@ +package engine + +import ( + "fmt" + "reflect" + "testing" + + "github.com/stretchr/testify/require" +) + +// TestMapKeyToString tests the functions [MapKeyToString()] and all the methods called by them +func TestMapKeyToString(t *testing.T) { + testCases := []struct { + payload interface{} + expected string + }{ + { + payload: "test", + expected: "test", + }, + { + payload: 123, + expected: "123", + }, + { + payload: 0.123, + expected: "0.123", + }, + { + payload: false, + expected: "false", + }, + { + payload: nil, + expected: "null", + }, + } + + for i, testCase := range testCases { + t.Run(fmt.Sprintf("mapKeyToString-%d", i), func(t *testing.T) { + v, err := mapKeyToString(map[string]interface{}{"key": testCase.payload}, "key", false) + require.Nil(t, err) + require.Equal(t, testCase.expected, *v) + }) + } + for i, testCase := range testCases { + t.Run(fmt.Sprintf("mapKeyToString-%d", i), func(t *testing.T) { + _, err := mapKeyToString(map[string]interface{}{"t": testCase.payload}, "key", false) + require.Error(t, err) + }) + } +} + +// Test_mergeWithMetadata tests the functions [mergeWithMetadata()] and all the methods called by them +func Test_mergeWithMetadata(t *testing.T) { + type args struct { + base map[string]interface{} + additional map[string]interface{} + } + tests := []struct { + name string + args args + want map[string]interface{} + }{ + { + name: "mergeWithMetadata", + args: args{ + 
base: map[string]interface{}{ + "key": "123", + }, + additional: map[string]interface{}{ + "key": "teste", + }, + }, + want: map[string]interface{}{ + "key": "123", + }, + }, + { + name: "mergeWithMetadata_2", + args: args{ + base: map[string]interface{}{ + "key": "123", + }, + additional: map[string]interface{}{ + "r": "teste2", + }, + }, + want: map[string]interface{}{ + "key": "123", + "r": "teste2", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := mergeWithMetadata(tt.args.base, tt.args.additional); !reflect.DeepEqual(got, tt.want) { + t.Errorf("mergeWithMetadata() = %v, want %v", got, tt.want) + } + }) + } +} + +// Test_mustMapKeyToString tests the functions [mustMapKeyToString()] and all the methods called by them +func Test_mustMapKeyToString(t *testing.T) { + type args struct { + m map[string]interface{} + key string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "mustMapKeyToString", + args: args{ + m: map[string]interface{}{ + "key": 123, + }, + key: "key", + }, + want: "123", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := mustMapKeyToString(tt.args.m, tt.args.key) + require.Equal(t, tt.want, *got) + }) + } +} + +// Test_ptrStringToString tests the functions [ptrStringToString()] and all the methods called by them +func Test_PtrStringToString(t *testing.T) { + type args struct { + v string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "PtrStringToString", + args: args{ + v: "123", + }, + want: "123", + }, + { + name: "PtrStringToString_empty", + args: args{ + v: "nil", + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.args.v == "nil" { + if got := PtrStringToString(nil); got != tt.want { + t.Errorf("PtrStringToString() = %v, want %v", got, tt.want) + } + } else { + if got := PtrStringToString(&tt.args.v); got != tt.want { + t.Errorf("PtrStringToString() 
= %v, want %v", got, tt.want) + } + } + }) + } +} diff --git a/pkg/kics/resolver_sink.go b/pkg/kics/resolver_sink.go index faa7fca218d..44b7dcbc499 100644 --- a/pkg/kics/resolver_sink.go +++ b/pkg/kics/resolver_sink.go @@ -48,7 +48,7 @@ func (s *Service) resolverSink(ctx context.Context, filename, scanID string) ([] Document: document, OriginalData: string(rfile.OriginalData), Kind: kind, - FileName: rfile.FileName, + FilePath: rfile.FileName, Content: string(rfile.Content), HelmID: rfile.SplitID, IDInfo: rfile.IDInfo, diff --git a/pkg/kics/service.go b/pkg/kics/service.go index 7cf42b5e793..2317bd6b400 100644 --- a/pkg/kics/service.go +++ b/pkg/kics/service.go @@ -7,6 +7,7 @@ import ( "github.com/Checkmarx/kics/pkg/engine" "github.com/Checkmarx/kics/pkg/engine/provider" + "github.com/Checkmarx/kics/pkg/engine/secrets" "github.com/Checkmarx/kics/pkg/model" "github.com/Checkmarx/kics/pkg/parser" "github.com/Checkmarx/kics/pkg/resolver" @@ -42,23 +43,17 @@ type Tracker interface { // a parser to parse and provide files in format that KICS understand, a inspector that runs the scanning and a tracker to // update scanning numbers type Service struct { - SourceProvider provider.SourceProvider - Storage Storage - Parser *parser.Parser - Inspector *engine.Inspector - Tracker Tracker - Resolver *resolver.Resolver - files model.FileMetadatas + SourceProvider provider.SourceProvider + Storage Storage + Parser *parser.Parser + Inspector *engine.Inspector + SecretsInspector *secrets.Inspector + Tracker Tracker + Resolver *resolver.Resolver + files model.FileMetadatas } -// StartScan executes scan over the context, using the scanID as reference -func (s *Service) StartScan( - ctx context.Context, - scanID string, - errCh chan<- error, - wg *sync.WaitGroup, - currentQuery chan<- int64) { - log.Debug().Msg("service.StartScan()") +func (s *Service) PrepareSources(ctx context.Context, scanID string, wg *sync.WaitGroup, errCh chan<- error) { defer wg.Done() // CxSAST query under 
review if err := s.SourceProvider.GetSources( @@ -73,6 +68,28 @@ func (s *Service) StartScan( ); err != nil { errCh <- errors.Wrap(err, "failed to read sources") } +} + +// StartScan executes scan over the context, using the scanID as reference +func (s *Service) StartScan( + ctx context.Context, + scanID string, + errCh chan<- error, + wg *sync.WaitGroup, + currentQuery chan<- int64) { + log.Debug().Msg("service.StartScan()") + defer wg.Done() + + secretsVulnerabilities, err := s.SecretsInspector.Inspect( + ctx, + s.SourceProvider.GetBasePaths(), + s.files, + currentQuery, + ) + if err != nil { + errCh <- errors.Wrap(err, "failed to inspect secrets") + } + vulnerabilities, err := s.Inspector.Inspect( ctx, scanID, @@ -84,6 +101,8 @@ func (s *Service) StartScan( if err != nil { errCh <- errors.Wrap(err, "failed to inspect files") } + vulnerabilities = append(vulnerabilities, secretsVulnerabilities...) + err = s.Storage.SaveVulnerabilities(ctx, vulnerabilities) if err != nil { errCh <- errors.Wrap(err, "failed to save vulnerabilities") diff --git a/pkg/kics/service_test.go b/pkg/kics/service_test.go index 21810babfe9..134b292ce90 100644 --- a/pkg/kics/service_test.go +++ b/pkg/kics/service_test.go @@ -11,6 +11,7 @@ import ( "github.com/Checkmarx/kics/internal/tracker" "github.com/Checkmarx/kics/pkg/engine" "github.com/Checkmarx/kics/pkg/engine/provider" + "github.com/Checkmarx/kics/pkg/engine/secrets" "github.com/Checkmarx/kics/pkg/model" "github.com/Checkmarx/kics/pkg/parser" dockerParser "github.com/Checkmarx/kics/pkg/parser/docker" @@ -25,12 +26,13 @@ import ( func TestService(t *testing.T) { //nolint mockParser, mockFilesSource, mockResolver := createParserSourceProvider("../../test/fixtures/test_helm") type fields struct { - SourceProvider provider.SourceProvider - Storage Storage - Parser []*parser.Parser - Inspector *engine.Inspector - Tracker Tracker - Resolver *resolver.Resolver + SourceProvider provider.SourceProvider + Storage Storage + Parser 
[]*parser.Parser + Inspector *engine.Inspector + SecretsInspector *secrets.Inspector + Tracker Tracker + Resolver *resolver.Resolver } type args struct { ctx context.Context @@ -51,12 +53,13 @@ func TestService(t *testing.T) { //nolint { name: "service", fields: fields{ - Inspector: &engine.Inspector{}, - Parser: mockParser, - Tracker: &tracker.CITracker{}, - Storage: storage.NewMemoryStorage(), - SourceProvider: mockFilesSource, - Resolver: mockResolver, + Inspector: &engine.Inspector{}, + SecretsInspector: &secrets.Inspector{}, + Parser: mockParser, + Tracker: &tracker.CITracker{}, + Storage: storage.NewMemoryStorage(), + SourceProvider: mockFilesSource, + Resolver: mockResolver, }, args: args{ ctx: nil, @@ -74,12 +77,13 @@ func TestService(t *testing.T) { //nolint s := make([]*Service, 0, len(tt.fields.Parser)) for _, parser := range tt.fields.Parser { s = append(s, &Service{ - SourceProvider: tt.fields.SourceProvider, - Storage: tt.fields.Storage, - Parser: parser, - Inspector: tt.fields.Inspector, - Tracker: tt.fields.Tracker, - Resolver: tt.fields.Resolver, + SourceProvider: tt.fields.SourceProvider, + Storage: tt.fields.Storage, + Parser: parser, + Inspector: tt.fields.Inspector, + SecretsInspector: tt.fields.SecretsInspector, + Tracker: tt.fields.Tracker, + Resolver: tt.fields.Resolver, }) } t.Run(fmt.Sprintf(tt.name+"_get_vulnerabilities"), func(t *testing.T) { diff --git a/pkg/kics/sink.go b/pkg/kics/sink.go index a58e4fd1854..c9f2d1edb4c 100644 --- a/pkg/kics/sink.go +++ b/pkg/kics/sink.go @@ -43,7 +43,7 @@ func (s *Service) sink(ctx context.Context, filename, scanID string, rc io.Reade LineInfoDocument: document, OriginalData: string(*content), Kind: kind, - FileName: filename, + FilePath: filename, Commands: fileCommands, } s.saveToFile(ctx, &file) diff --git a/pkg/model/model.go b/pkg/model/model.go index e6c35bd5aa1..3b4b34c98a2 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -90,7 +90,7 @@ type FileMetadata struct { LineInfoDocument 
map[string]interface{} OriginalData string `db:"orig_data"` Kind FileKind `db:"kind"` - FileName string `db:"file_name"` + FilePath string `db:"file_path"` Content string HelmID string IDInfo map[int]interface{} @@ -219,16 +219,16 @@ func (m FileMetadatas) Combine(lineInfo bool) Documents { continue } if ignore { - log.Debug().Msgf("Ignoring file %s", m[i].FileName) + log.Debug().Msgf("Ignoring file %s", m[i].FilePath) continue } if lineInfo { m[i].LineInfoDocument["id"] = m[i].ID - m[i].LineInfoDocument["file"] = m[i].FileName + m[i].LineInfoDocument["file"] = m[i].FilePath documents.Documents = append(documents.Documents, m[i].LineInfoDocument) } else { m[i].Document["id"] = m[i].ID - m[i].Document["file"] = m[i].FileName + m[i].Document["file"] = m[i].FilePath documents.Documents = append(documents.Documents, m[i].Document) } } diff --git a/pkg/model/model_test.go b/pkg/model/model_test.go index 2593f92cedd..ae13cebe235 100644 --- a/pkg/model/model_test.go +++ b/pkg/model/model_test.go @@ -42,7 +42,7 @@ func TestFileMetadatas(t *testing.T) { ID: "id", ScanID: "scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", Document: Document{ "id": "", }, @@ -54,7 +54,7 @@ func TestFileMetadatas(t *testing.T) { ID: "id", ScanID: "scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", Document: nil, }, } @@ -64,7 +64,7 @@ func TestFileMetadatas(t *testing.T) { ID: "id", ScanID: "scan_id", OriginalData: "orig_data", - FileName: "file_name", + FilePath: "file_name", Document: Document{ "id": "", }, diff --git a/pkg/scanner/scanner.go b/pkg/scanner/scanner.go index b46894e308b..9afd6d78356 100644 --- a/pkg/scanner/scanner.go +++ b/pkg/scanner/scanner.go @@ -12,6 +12,41 @@ import ( type serviceSlice []*kics.Service +func PrepareAndScan(ctx context.Context, scanID string, proBarBuilder progress.PbBuilder, services serviceSlice) error { + metrics.Metric.Start("prepare_sources") + var wg sync.WaitGroup + wgDone := 
make(chan bool) + errCh := make(chan error) + var wgProg sync.WaitGroup + + for _, service := range services { + wg.Add(1) + go service.PrepareSources(ctx, scanID, &wg, errCh) + } + + go func() { + defer func() { + close(wgDone) + }() + wg.Wait() + wgProg.Wait() + }() + + select { + case <-wgDone: + metrics.Metric.Stop() + err := StartScan(ctx, scanID, proBarBuilder, services) + if err != nil { + return err + } + break + case err := <-errCh: + close(errCh) + return err + } + return nil +} + // StartScan will run concurrent scans by parser func StartScan(ctx context.Context, scanID string, proBarBuilder progress.PbBuilder, services serviceSlice) error { defer metrics.Metric.Stop() @@ -21,10 +56,12 @@ func StartScan(ctx context.Context, scanID string, proBarBuilder progress.PbBuil errCh := make(chan error) currentQuery := make(chan int64, 1) var wgProg sync.WaitGroup + total := services.GetQueriesLength() if total != 0 { startProgressBar(total, &wgProg, currentQuery, proBarBuilder) } + for _, service := range services { wg.Add(1) go service.StartScan(ctx, scanID, errCh, &wg, currentQuery) @@ -55,6 +92,7 @@ func (s serviceSlice) GetQueriesLength() int { count := 0 for _, service := range s { count += service.Inspector.LenQueriesByPlat(service.Parser.Platform) + count += service.SecretsInspector.GetQueriesLength() } return count } diff --git a/pkg/scanner/scanner_test.go b/pkg/scanner/scanner_test.go index ea9328e5c68..6ff985cfd58 100644 --- a/pkg/scanner/scanner_test.go +++ b/pkg/scanner/scanner_test.go @@ -5,10 +5,12 @@ import ( "path/filepath" "testing" + "github.com/Checkmarx/kics/assets" "github.com/Checkmarx/kics/internal/storage" "github.com/Checkmarx/kics/internal/tracker" "github.com/Checkmarx/kics/pkg/engine" "github.com/Checkmarx/kics/pkg/engine/provider" + "github.com/Checkmarx/kics/pkg/engine/secrets" "github.com/Checkmarx/kics/pkg/engine/source" "github.com/Checkmarx/kics/pkg/kics" "github.com/Checkmarx/kics/pkg/parser" @@ -81,6 +83,19 @@ func 
createServices(types, cloudProviders []string) (serviceSlice, *storage.Memo return nil, nil, err } + secretsInspector, err := secrets.NewInspector( + context.Background(), + map[string]bool{}, + t, + &source.QueryInspectorParameters{}, + false, + 60, + assets.SecretsQueryRegexRulesJSON, + ) + if err != nil { + return nil, nil, err + } + combinedParser, err := parser.NewBuilder(). Add(&jsonParser.Parser{}). Add(&yamlParser.Parser{}). @@ -104,12 +119,13 @@ func createServices(types, cloudProviders []string) (serviceSlice, *storage.Memo for _, parser := range combinedParser { services = append(services, &kics.Service{ - SourceProvider: filesSource, - Storage: store, - Parser: parser, - Inspector: inspector, - Tracker: t, - Resolver: combinedResolver, + SourceProvider: filesSource, + Storage: store, + Parser: parser, + Inspector: inspector, + SecretsInspector: secretsInspector, + Tracker: t, + Resolver: combinedResolver, }) } return services, store, nil diff --git a/test/main_test.go b/test/main_test.go index 22bfe2c6233..13e7c0fa0de 100644 --- a/test/main_test.go +++ b/test/main_test.go @@ -37,7 +37,6 @@ var ( "../assets/queries/ansible/gcp": {FileKind: []model.FileKind{model.KindYAML}, Platform: "ansible"}, "../assets/queries/ansible/azure": {FileKind: []model.FileKind{model.KindYAML}, Platform: "ansible"}, "../assets/queries/dockerfile": {FileKind: []model.FileKind{model.KindDOCKER}, Platform: "dockerfile"}, - "../assets/queries/common": {FileKind: []model.FileKind{model.KindCOMMON}, Platform: "common"}, "../assets/queries/openAPI/general": {FileKind: []model.FileKind{model.KindYAML, model.KindJSON}, Platform: "openAPI"}, "../assets/queries/openAPI/3.0": {FileKind: []model.FileKind{model.KindYAML, model.KindJSON}, Platform: "openAPI"}, "../assets/queries/openAPI/2.0": {FileKind: []model.FileKind{model.KindYAML, model.KindJSON}, Platform: "openAPI"}, @@ -52,8 +51,9 @@ var ( ) const ( - scanID = "test_scan" - BaseTestsScanPath = "../assets/queries/" + scanID = 
"test_scan" + BaseTestsScanPath = "../assets/queries/" + ExpectedResultsFilename = "positive_expected_result.json" ) func TestMain(m *testing.M) { @@ -70,9 +70,9 @@ func (q queryEntry) getSampleFiles(tb testing.TB, filePattern string) []string { var files []string for _, kinds := range q.kind { kindFiles, err := filepath.Glob(path.Join(q.dir, fmt.Sprintf(filePattern, strings.ToLower(string(kinds))))) - x0 := filepath.FromSlash(path.Join(q.dir, "test/positive_expected_result.json")) + positiveExpectedResultsFilepath := filepath.FromSlash(path.Join(q.dir, "test", ExpectedResultsFilename)) for i, check := range kindFiles { - if check == x0 { + if check == positiveExpectedResultsFilepath { kindFiles = append(kindFiles[:i], kindFiles[i+1:]...) } } @@ -91,7 +91,7 @@ func (q queryEntry) NegativeFiles(tb testing.TB) []string { } func (q queryEntry) ExpectedPositiveResultFile() string { - return filepath.FromSlash(path.Join(q.dir, "test/positive_expected_result.json")) + return filepath.FromSlash(path.Join(q.dir, "test", ExpectedResultsFilename)) } func appendQueries(queriesDir []queryEntry, dirName string, kind []model.FileKind, platform string) []queryEntry { @@ -149,7 +149,7 @@ func getFilesMetadatasWithContent(t testing.TB, filePath string, content []byte) LineInfoDocument: document, OriginalData: string(content), Kind: kind, - FileName: filePath, + FilePath: filePath, }) } } diff --git a/test/queries_content_test.go b/test/queries_content_test.go index 2c02e742025..cd925c0bf97 100644 --- a/test/queries_content_test.go +++ b/test/queries_content_test.go @@ -49,6 +49,7 @@ var ( "../assets/queries/terraform/azure/sensitive_port_is_exposed_to_entire_network", "../assets/queries/terraform/azure/sensitive_port_is_exposed_to_small_public_network", "../assets/queries/dockerfile/apt_get_install_pin_version_not_defined", + "../assets/queries/terraform/aws/redshift_cluster_without_vpc", } // TODO uncomment this test once all metadata are fixed diff --git a/test/queries_test.go 
b/test/queries_test.go index 5a0b29bb392..c81d16f6d0f 100644 --- a/test/queries_test.go +++ b/test/queries_test.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "sort" + "strconv" "strings" "sync" "testing" @@ -30,7 +31,7 @@ func BenchmarkQueries(b *testing.B) { for _, entry := range queries { b.ResetTimer() for n := 0; n < b.N; n++ { - benchmarkPositiveandNegativeQueries(b, entry) + benchmarkPositiveAndNegativeQueries(b, entry) } } } @@ -44,7 +45,7 @@ func TestQueries(t *testing.T) { queries := loadQueries(t) for _, entry := range queries { - testPositiveandNegativeQueries(t, entry) + testPositiveAndNegativeQueries(t, entry) } } @@ -95,7 +96,7 @@ func TestUniqueQueryIDs(t *testing.T) { } } -func testPositiveandNegativeQueries(t *testing.T, entry queryEntry) { +func testPositiveAndNegativeQueries(t *testing.T, entry queryEntry) { name := strings.TrimPrefix(entry.dir, BaseTestsScanPath) t.Run(name+"_positive", func(t *testing.T) { testQuery(t, entry, entry.PositiveFiles(t), getExpectedVulnerabilities(t, entry)) @@ -105,7 +106,7 @@ func testPositiveandNegativeQueries(t *testing.T, entry queryEntry) { }) } -func benchmarkPositiveandNegativeQueries(b *testing.B, entry queryEntry) { +func benchmarkPositiveAndNegativeQueries(b *testing.B, entry queryEntry) { name := strings.TrimPrefix(entry.dir, BaseTestsScanPath) b.Run(name+"_positive", func(b *testing.B) { testQuery(b, entry, entry.PositiveFiles(b), getExpectedVulnerabilities(b, entry)) @@ -195,7 +196,7 @@ func testQuery(tb testing.TB, entry queryEntry, filesPath []string, expectedVuln require.Nil(tb, err) validateIssueTypes(tb, vulnerabilities) - requireEqualVulnerabilities(tb, expectedVulnerabilities, vulnerabilities, entry) + requireEqualVulnerabilities(tb, expectedVulnerabilities, vulnerabilities, entry.dir) } func vulnerabilityCompare(vulnerabilitySlice []model.Vulnerability, i, j int) bool { @@ -219,7 +220,22 @@ func validateIssueTypes(tb testing.TB, vulnerabilies []model.Vulnerability) { } } -func 
requireEqualVulnerabilities(tb testing.TB, expected, actual []model.Vulnerability, entry queryEntry) { +func diffActualExpectedVulnerabilities(actual, expected []model.Vulnerability) []string { + m := make(map[string]bool) + diff := make([]string, 0) + for i := range expected { + m[expected[i].QueryName+":"+expected[i].FileName+":"+strconv.Itoa(expected[i].Line)] = true + } + for i := range actual { + if _, ok := m[actual[i].QueryName+":"+filepath.Base(actual[i].FileName)+":"+strconv.Itoa(actual[i].Line)]; !ok { + diff = append(diff, actual[i].FileName+":"+strconv.Itoa(actual[i].Line)) + } + } + + return diff +} + +func requireEqualVulnerabilities(tb testing.TB, expected, actual []model.Vulnerability, dir string) { sort.Slice(expected, func(i, j int) bool { return vulnerabilityCompare(expected, i, j) }) @@ -227,23 +243,26 @@ func requireEqualVulnerabilities(tb testing.TB, expected, actual []model.Vulnera return vulnerabilityCompare(actual, i, j) }) - require.Len(tb, actual, len(expected), "Count of actual issues and expected vulnerabilities doesn't match") + require.Len(tb, actual, len(expected), + "Count of actual issues and expected vulnerabilities doesn't match\n -- \n%+v", + strings.Join(diffActualExpectedVulnerabilities(actual, expected), ",\n")) for i := range expected { if i > len(actual)-1 { - tb.Fatalf("Not enough results detected, expected %d, found %d", len(expected), len(actual)) + tb.Fatalf("Not enough results detected, expected %d, found %d ", + len(expected), + len(actual)) } expectedItem := expected[i] actualItem := actual[i] if expectedItem.FileName != "" { - require.Equal(tb, expectedItem.FileName, filepath.Base(actualItem.FileName), "Incorrect file name for query %s", entry.dir) + require.Equal(tb, expectedItem.FileName, filepath.Base(actualItem.FileName), "Incorrect file name for query %s", dir) } - require.Equal(tb, scanID, actualItem.ScanID) require.Equal(tb, expectedItem.Line, actualItem.Line, "Not corrected detected line for query %s 
\n%v\n---\n%v", - entry.dir, filterFileNameAndLine(expected), filterFileNameAndLine(actual)) - require.Equal(tb, expectedItem.Severity, actualItem.Severity, "Invalid severity for query %s", entry.dir) - require.Equal(tb, expectedItem.QueryName, actualItem.QueryName, "Invalid query name for query %s", entry.dir) + dir, filterFileNameAndLine(expected), filterFileNameAndLine(actual)) + require.Equal(tb, expectedItem.Severity, actualItem.Severity, "Invalid severity for query %s", dir) + require.Equal(tb, expectedItem.QueryName, actualItem.QueryName, "Invalid query name for query %s :: %s", dir, actualItem.FileName) if expectedItem.Value != nil { require.NotNil(tb, actualItem.Value) require.Equal(tb, *expectedItem.Value, *actualItem.Value) diff --git a/test/secrets_test.go b/test/secrets_test.go new file mode 100644 index 00000000000..5abcd3318d3 --- /dev/null +++ b/test/secrets_test.go @@ -0,0 +1,98 @@ +package test + +import ( + "context" + "encoding/json" + "os" + "path" + "path/filepath" + "strings" + "sync" + "testing" + + "github.com/Checkmarx/kics/assets" + "github.com/Checkmarx/kics/internal/tracker" + "github.com/Checkmarx/kics/pkg/engine/secrets" + "github.com/Checkmarx/kics/pkg/engine/source" + "github.com/Checkmarx/kics/pkg/model" + "github.com/Checkmarx/kics/pkg/progress" + "github.com/stretchr/testify/require" +) + +const ( + secretsQueryDir = BaseTestsScanPath + "common/passwords_and_secrets" +) + +func TestSecretsQuery(t *testing.T) { + expectedVulnerabilities := loadSecretsQueryExpectedResults(t) + require.NotEmpty(t, expectedVulnerabilities, "expected vulnerabilities are empty") + + subTestPrefix := strings.TrimPrefix(secretsQueryDir, BaseTestsScanPath) + t.Run(subTestPrefix+"_positive", func(t *testing.T) { + positiveSamples, err := filepath.Glob(path.Join(secretsQueryDir, "test/positive*.*")) + require.NoError(t, err, "unable to glob positive samples") + testSecretsInspector(t, positiveSamples, expectedVulnerabilities) + }) + 
t.Run(subTestPrefix+"_negative", func(t *testing.T) { + negativeSamples, err := filepath.Glob(path.Join(secretsQueryDir, "test/negative*.*")) + require.NoError(t, err, "unable to glob negative samples") + testSecretsInspector(t, negativeSamples, []model.Vulnerability{}) + }) +} + +func testSecretsInspector(t *testing.T, samplePaths []string, expectedVulnerabilities []model.Vulnerability) { + ctx := context.TODO() + excludeResults := map[string]bool{} + + secretsInspector, err := secrets.NewInspector( + ctx, + excludeResults, + &tracker.CITracker{}, + &source.QueryInspectorParameters{ + IncludeQueries: source.IncludeQueries{ByIDs: []string{}}, + ExcludeQueries: source.ExcludeQueries{ByIDs: []string{}, ByCategories: []string{}}, + InputDataPath: "", + }, + false, + 60, + assets.SecretsQueryRegexRulesJSON, + ) + require.NoError(t, err, "unable to create secrets inspector") + + wg := &sync.WaitGroup{} + currentQuery := make(chan int64) + wg.Add(1) + + proBarBuilder := progress.InitializePbBuilder(true, true, true) + progressBar := proBarBuilder.BuildCounter("Executing queries: ", secretsInspector.GetQueriesLength(), wg, currentQuery) + + go progressBar.Start() + + vulnerabilities, err := secretsInspector.Inspect( + ctx, + []string{BaseTestsScanPath}, + getFileMetadatas(t, samplePaths), + currentQuery, + ) + require.NoError(t, err, "unable to inspect secrets") + + go func() { + defer func() { + close(currentQuery) + }() + }() + + requireEqualVulnerabilities(t, expectedVulnerabilities, vulnerabilities, secretsQueryDir) +} + +func loadSecretsQueryExpectedResults(t *testing.T) []model.Vulnerability { + expectedFilepath := filepath.FromSlash(path.Join(secretsQueryDir, "test", ExpectedResultsFilename)) + b, err := os.ReadFile(expectedFilepath) + require.NoError(t, err, "Error reading expected results file") + + var expectedResults []model.Vulnerability + err = json.Unmarshal(b, &expectedResults) + require.NoError(t, err, "can't unmarshal expected result file %s", 
expectedFilepath) + + return expectedResults +}