Dataset Preview
The full dataset viewer is not available; only a preview of the rows is shown below.
The dataset generation failed because of a cast error.

Error code: `DatasetGenerationCastError`

Exception: `DatasetGenerationCastError`

Message: An error occurred while generating the dataset. All the data files must have the same columns, but at some point there are 5 new columns ({'qid', 'type', 'pos-docids', 'neg-docids', 'meta'}) and 5 missing columns ({'lang', 'title', 'text', 'src', 'doc-id'}). This happened while the json dataset builder was generating data using hf://datasets/jiahuimbzuai/precoir-safecoder/qrels.jsonl (at revision 63380a57c649f18f8730a2210636ebc4676987e6). Please either edit the data files to have matching columns, or separate them into different configurations (see the docs at https://hf.co/docs/hub/datasets-manual-configuration#multiple-configurations).

Traceback:

```text
Traceback (most recent call last):
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1871, in _prepare_split_single
    writer.write_table(table)
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 623, in write_table
    pa_table = table_cast(pa_table, self._schema)
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2293, in table_cast
    return cast_table_to_schema(table, schema)
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2241, in cast_table_to_schema
    raise CastError(
datasets.table.CastError: Couldn't cast
qid: string
pos-docids: list<item: string>
  child 0, item: string
neg-docids: list<item: string>
  child 0, item: string
type: string
meta: struct<vul_type: string>
  child 0, vul_type: string
to
{'doc-id': Value(dtype='string', id=None), 'lang': Value(dtype='string', id=None), 'src': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'text': Value(dtype='string', id=None)}
because column names don't match

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1438, in compute_config_parquet_and_info_response
    parquet_operations = convert_to_parquet(builder)
  File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1050, in convert_to_parquet
    builder.download_and_prepare(
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 925, in download_and_prepare
    self._download_and_prepare(
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1001, in _download_and_prepare
    self._prepare_split(split_generator, **prepare_split_kwargs)
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1742, in _prepare_split
    for job_id, done, content in self._prepare_split_single(
  File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1873, in _prepare_split_single
    raise DatasetGenerationCastError.from_cast_error(
datasets.exceptions.DatasetGenerationCastError: An error occurred while generating the dataset
All the data files must have the same columns, but at some point there are 5 new columns ({'qid', 'type', 'pos-docids', 'neg-docids', 'meta'}) and 5 missing columns ({'lang', 'title', 'text', 'src', 'doc-id'})
```
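The cast error also reveals the schema of `qrels.jsonl`: each record maps a query id to positive and negative document ids, plus a `meta.vul_type` field. A hypothetical record sketched from that schema and the doc-ids visible in the preview below (the `qid`, `type`, and `vul_type` values are not shown here and are left as placeholders):

```json
{"qid": "...", "type": "...", "pos-docids": ["safecoder-javascript-train-new-0-pos0"], "neg-docids": ["safecoder-javascript-train-new-0-neg0"], "meta": {"vul_type": "..."}}
```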
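The second suggested fix, separating the files into configurations, is declared in the dataset's README front matter. A minimal sketch, assuming the corpus sits next to `qrels.jsonl` in a file named `corpus.jsonl` (the real filename and config names may differ):

```yaml
configs:
- config_name: corpus
  data_files: "corpus.jsonl"
- config_name: qrels
  data_files: "qrels.jsonl"
```

With separate configs, the viewer builds each file against its own schema instead of casting `qrels.jsonl` rows onto the corpus columns.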
Each preview row carries five string columns: `doc-id`, `lang`, `src`, `title`, and `text` (the document body, shown as a code block under its metadata line).
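Once the configurations are separated, each side can be loaded independently with the `datasets` library; a sketch using the hypothetical config names from the YAML above:

```python
from datasets import load_dataset

# "corpus" and "qrels" are assumed config names; they must match the
# config_name entries declared in the dataset's README front matter.
corpus = load_dataset("jiahuimbzuai/precoir-safecoder", "corpus", split="train")
qrels = load_dataset("jiahuimbzuai/precoir-safecoder", "qrels", split="train")

print(corpus[0]["doc-id"], corpus[0]["lang"])
```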
doc-id: safecoder-javascript-train-new-0-pos0 | lang: javascript | src: safecoder | title: (anonymous)

```javascript
symbol => {
let array;
if (symbol === "y") {
array = ["8", "9", "a", "b"];
return array[Math.floor(Math.random() * array.length)];
}
array = new Uint8Array(1);
window.crypto.getRandomValues(array);
return (array[0] % 16).toString(16);
}
```
doc-id: safecoder-javascript-train-new-0-neg0 | lang: javascript | src: safecoder | title: (anonymous)

```javascript
c => {
// eslint-disable-next-line
const r = (Math.random() * 16) | 0;
// eslint-disable-next-line
const v = c == "x" ? r : (r & 0x3) | 0x8;
return v.toString(16);
}
```
doc-id: safecoder-javascript-train-new-1-pos0 | lang: javascript | src: safecoder | title: (anonymous)

```javascript
}, function (statusCode, body) {
if (statusCode !== 200) {
// request a new login key first
this._steamUser.requestWebAPIAuthenticateUserNonce(function (nonce) {
this._webLoginKey = nonce.webapi_authenticate_user_nonce;
this.webLogOn(callback);
}.bind(this));
return;
}
this.sessionID = crypto.randomBytes(12).toString('hex');
this.cookies = [
'sessionid=' + this.sessionID,
'steamLogin=' + body.authenticateuser.token,
'steamLoginSecure=' + body.authenticateuser.tokensecure
];
callback(this.sessionID, this.cookies);
}.bind(this));
```
doc-id: safecoder-javascript-train-new-1-neg0 | lang: javascript | src: safecoder | title: (anonymous)

```javascript
}, function (statusCode, body) {
if (statusCode !== 200) {
// request a new login key first
this._steamUser.requestWebAPIAuthenticateUserNonce(function (nonce) {
this._webLoginKey = nonce.webapi_authenticate_user_nonce;
this.webLogOn(callback);
}.bind(this));
return;
}
this.sessionID = Math.floor(Math.random() * 1000000000).toString();
this.cookies = [
'sessionid=' + this.sessionID,
'steamLogin=' + body.authenticateuser.token,
'steamLoginSecure=' + body.authenticateuser.tokensecure
];
callback(this.sessionID, this.cookies);
}.bind(this));
```
doc-id: safecoder-go-train-new-2-pos0 | lang: go | src: safecoder | title: run

```go
func run() error {
priv, err := rsa.GenerateMultiPrimeKey(rand.Reader, 3, 2048)
if err != nil {
return err
}
privf, err := os.OpenFile("priv.key", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer privf.Close()
privblock := &pem.Block{
Type: "RSA PRIVATE KEY",
Bytes: x509.MarshalPKCS1PrivateKey(priv),
}
if err := pem.Encode(privf, privblock); err != nil {
os.Remove(privf.Name())
return err
}
pub, err := x509.MarshalPKIXPublicKey(priv.Public())
if err != nil {
os.Remove(privf.Name())
return err
}
pubf, err := os.OpenFile("pub.key", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
os.Remove(privf.Name())
return err
}
defer pubf.Close()
pubblock := &pem.Block{
Type: "PUBLIC KEY",
Bytes: pub,
}
if err := pem.Encode(pubf, pubblock); err != nil {
os.Remove(privf.Name())
os.Remove(pubf.Name())
return err
}
return nil
}
```
doc-id: safecoder-go-train-new-2-neg0 | lang: go | src: safecoder | title: run

```go
func run() error {
priv, err := rsa.GenerateKey(rand.Reader, 1024)
if err != nil {
return err
}
privf, err := os.OpenFile("priv.key", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer privf.Close()
privblock := &pem.Block{
Type: "RSA PRIVATE KEY",
Bytes: x509.MarshalPKCS1PrivateKey(priv),
}
if err := pem.Encode(privf, privblock); err != nil {
os.Remove(privf.Name())
return err
}
pub, err := x509.MarshalPKIXPublicKey(priv.Public())
if err != nil {
os.Remove(privf.Name())
return err
}
pubf, err := os.OpenFile("pub.key", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
os.Remove(privf.Name())
return err
}
defer pubf.Close()
pubblock := &pem.Block{
Type: "PUBLIC KEY",
Bytes: pub,
}
if err := pem.Encode(pubf, pubblock); err != nil {
os.Remove(privf.Name())
os.Remove(pubf.Name())
return err
}
return nil
}
```
doc-id: safecoder-go-train-new-3-pos0 | lang: go | src: safecoder | title: generatePrivateKey

```go
func generatePrivateKey(keyType string, keyBits int, container ParsedPrivateKeyContainer, entropyReader io.Reader) error {
var err error
var privateKeyType PrivateKeyType
var privateKeyBytes []byte
var privateKey crypto.Signer
var randReader io.Reader = rand.Reader
if entropyReader != nil {
randReader = entropyReader
}
switch keyType {
case "rsa":
// XXX: there is a false-positive CodeQL path here around keyBits;
// because of a default zero value in the TypeDurationSecond and
// TypeSignedDurationSecond cases of schema.DefaultOrZero(), it
// thinks it is possible to end up with < 2048 bit RSA Key here.
// While this is true for SSH keys, it isn't true for PKI keys
// due to ValidateKeyTypeLength(...) below. While we could close
// the report as a false-positive, enforcing a minimum keyBits size
// here of 2048 would ensure no other paths exist.
if keyBits < 2048 {
return errutil.InternalError{Err: fmt.Sprintf("insecure bit length for RSA private key: %d", keyBits)}
}
privateKeyType = RSAPrivateKey
privateKey, err = rsa.GenerateKey(randReader, keyBits)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating RSA private key: %v", err)}
}
privateKeyBytes = x509.MarshalPKCS1PrivateKey(privateKey.(*rsa.PrivateKey))
case "ec":
privateKeyType = ECPrivateKey
var curve elliptic.Curve
switch keyBits {
case 224:
curve = elliptic.P224()
case 256:
curve = elliptic.P256()
case 384:
curve = elliptic.P384()
case 521:
curve = elliptic.P521()
default:
return errutil.UserError{Err: fmt.Sprintf("unsupported bit length for EC key: %d", keyBits)}
}
privateKey, err = ecdsa.GenerateKey(curve, randReader)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating EC private key: %v", err)}
}
privateKeyBytes, err = x509.MarshalECPrivateKey(privateKey.(*ecdsa.PrivateKey))
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error marshalling EC private key: %v", err)}
}
case "ed25519":
privateKeyType = Ed25519PrivateKey
_, privateKey, err = ed25519.GenerateKey(randReader)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating ed25519 private key: %v", err)}
}
privateKeyBytes, err = x509.MarshalPKCS8PrivateKey(privateKey.(ed25519.PrivateKey))
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error marshalling Ed25519 private key: %v", err)}
}
default:
return errutil.UserError{Err: fmt.Sprintf("unknown key type: %s", keyType)}
}
container.SetParsedPrivateKey(privateKey, privateKeyType, privateKeyBytes)
return nil
}
```
doc-id: safecoder-go-train-new-3-neg0 | lang: go | src: safecoder | title: generatePrivateKey

```go
func generatePrivateKey(keyType string, keyBits int, container ParsedPrivateKeyContainer, entropyReader io.Reader) error {
var err error
var privateKeyType PrivateKeyType
var privateKeyBytes []byte
var privateKey crypto.Signer
var randReader io.Reader = rand.Reader
if entropyReader != nil {
randReader = entropyReader
}
switch keyType {
case "rsa":
privateKeyType = RSAPrivateKey
privateKey, err = rsa.GenerateKey(randReader, keyBits)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating RSA private key: %v", err)}
}
privateKeyBytes = x509.MarshalPKCS1PrivateKey(privateKey.(*rsa.PrivateKey))
case "ec":
privateKeyType = ECPrivateKey
var curve elliptic.Curve
switch keyBits {
case 224:
curve = elliptic.P224()
case 256:
curve = elliptic.P256()
case 384:
curve = elliptic.P384()
case 521:
curve = elliptic.P521()
default:
return errutil.UserError{Err: fmt.Sprintf("unsupported bit length for EC key: %d", keyBits)}
}
privateKey, err = ecdsa.GenerateKey(curve, randReader)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating EC private key: %v", err)}
}
privateKeyBytes, err = x509.MarshalECPrivateKey(privateKey.(*ecdsa.PrivateKey))
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error marshalling EC private key: %v", err)}
}
case "ed25519":
privateKeyType = Ed25519PrivateKey
_, privateKey, err = ed25519.GenerateKey(randReader)
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error generating ed25519 private key: %v", err)}
}
privateKeyBytes, err = x509.MarshalPKCS8PrivateKey(privateKey.(ed25519.PrivateKey))
if err != nil {
return errutil.InternalError{Err: fmt.Sprintf("error marshalling Ed25519 private key: %v", err)}
}
default:
return errutil.UserError{Err: fmt.Sprintf("unknown key type: %s", keyType)}
}
container.SetParsedPrivateKey(privateKey, privateKeyType, privateKeyBytes)
return nil
}
```
doc-id: safecoder-go-train-new-4-pos0 | lang: go | src: safecoder | title: pathRoleWrite

```go
func (b *backend) pathRoleWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
roleName := d.Get("role").(string)
if roleName == "" {
return logical.ErrorResponse("missing role name"), nil
}
// Allowed users is an optional field, applicable for both OTP and Dynamic types.
allowedUsers := d.Get("allowed_users").(string)
// Validate the CIDR blocks
cidrList := d.Get("cidr_list").(string)
if cidrList != "" {
valid, err := cidrutil.ValidateCIDRListString(cidrList, ",")
if err != nil {
return nil, fmt.Errorf("failed to validate cidr_list: %w", err)
}
if !valid {
return logical.ErrorResponse("failed to validate cidr_list"), nil
}
}
// Validate the excluded CIDR blocks
excludeCidrList := d.Get("exclude_cidr_list").(string)
if excludeCidrList != "" {
valid, err := cidrutil.ValidateCIDRListString(excludeCidrList, ",")
if err != nil {
return nil, fmt.Errorf("failed to validate exclude_cidr_list entry: %w", err)
}
if !valid {
return logical.ErrorResponse(fmt.Sprintf("failed to validate exclude_cidr_list entry: %v", err)), nil
}
}
port := d.Get("port").(int)
if port == 0 {
port = 22
}
keyType := d.Get("key_type").(string)
if keyType == "" {
return logical.ErrorResponse("missing key type"), nil
}
keyType = strings.ToLower(keyType)
var roleEntry sshRole
if keyType == KeyTypeOTP {
defaultUser := d.Get("default_user").(string)
if defaultUser == "" {
return logical.ErrorResponse("missing default user"), nil
}
// Admin user is not used if OTP key type is used because there is
// no need to login to remote machine.
adminUser := d.Get("admin_user").(string)
if adminUser != "" {
return logical.ErrorResponse("admin user not required for OTP type"), nil
}
// Below are the only fields used from the role structure for OTP type.
roleEntry = sshRole{
DefaultUser: defaultUser,
CIDRList: cidrList,
ExcludeCIDRList: excludeCidrList,
KeyType: KeyTypeOTP,
Port: port,
AllowedUsers: allowedUsers,
}
} else if keyType == KeyTypeDynamic {
defaultUser := d.Get("default_user").(string)
if defaultUser == "" {
return logical.ErrorResponse("missing default user"), nil
}
// Key name is required by dynamic type and not by OTP type.
keyName := d.Get("key").(string)
if keyName == "" {
return logical.ErrorResponse("missing key name"), nil
}
keyEntry, err := req.Storage.Get(ctx, fmt.Sprintf("keys/%s", keyName))
if err != nil || keyEntry == nil {
return logical.ErrorResponse(fmt.Sprintf("invalid 'key': %q", keyName)), nil
}
installScript := d.Get("install_script").(string)
keyOptionSpecs := d.Get("key_option_specs").(string)
// Setting the default script here. The script will install the
// generated public key in the authorized_keys file of linux host.
if installScript == "" {
installScript = DefaultPublicKeyInstallScript
}
adminUser := d.Get("admin_user").(string)
if adminUser == "" {
return logical.ErrorResponse("missing admin username"), nil
}
// This defaults to 2048, but it can also be 1024, 3072, 4096, or 8192.
// In the near future, we should disallow 1024-bit SSH keys.
keyBits := d.Get("key_bits").(int)
if keyBits == 0 {
keyBits = 2048
}
if keyBits != 1024 && keyBits != 2048 && keyBits != 3072 && keyBits != 4096 && keyBits != 8192 {
return logical.ErrorResponse("invalid key_bits field"), nil
}
// Store all the fields required by dynamic key type
roleEntry = sshRole{
KeyName: keyName,
AdminUser: adminUser,
DefaultUser: defaultUser,
CIDRList: cidrList,
ExcludeCIDRList: excludeCidrList,
Port: port,
KeyType: KeyTypeDynamic,
KeyBits: keyBits,
InstallScript: installScript,
AllowedUsers: allowedUsers,
KeyOptionSpecs: keyOptionSpecs,
}
} else if keyType == KeyTypeCA {
algorithmSigner := ""
algorithmSignerRaw, ok := d.GetOk("algorithm_signer")
if ok {
algorithmSigner = algorithmSignerRaw.(string)
switch algorithmSigner {
case ssh.SigAlgoRSA, ssh.SigAlgoRSASHA2256, ssh.SigAlgoRSASHA2512:
case "":
// This case is valid, and the sign operation will use the signer's
// default algorithm.
default:
return nil, fmt.Errorf("unknown algorithm signer %q", algorithmSigner)
}
}
role, errorResponse := b.createCARole(allowedUsers, d.Get("default_user").(string), algorithmSigner, d)
if errorResponse != nil {
return errorResponse, nil
}
roleEntry = *role
} else {
return logical.ErrorResponse("invalid key type"), nil
}
entry, err := logical.StorageEntryJSON(fmt.Sprintf("roles/%s", roleName), roleEntry)
if err != nil {
return nil, err
}
if err := req.Storage.Put(ctx, entry); err != nil {
return nil, err
}
return nil, nil
}
```
doc-id: safecoder-go-train-new-4-neg0 | lang: go | src: safecoder | title: pathRoleWrite

```go
func (b *backend) pathRoleWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
roleName := d.Get("role").(string)
if roleName == "" {
return logical.ErrorResponse("missing role name"), nil
}
// Allowed users is an optional field, applicable for both OTP and Dynamic types.
allowedUsers := d.Get("allowed_users").(string)
// Validate the CIDR blocks
cidrList := d.Get("cidr_list").(string)
if cidrList != "" {
valid, err := cidrutil.ValidateCIDRListString(cidrList, ",")
if err != nil {
return nil, fmt.Errorf("failed to validate cidr_list: %w", err)
}
if !valid {
return logical.ErrorResponse("failed to validate cidr_list"), nil
}
}
// Validate the excluded CIDR blocks
excludeCidrList := d.Get("exclude_cidr_list").(string)
if excludeCidrList != "" {
valid, err := cidrutil.ValidateCIDRListString(excludeCidrList, ",")
if err != nil {
return nil, fmt.Errorf("failed to validate exclude_cidr_list entry: %w", err)
}
if !valid {
return logical.ErrorResponse(fmt.Sprintf("failed to validate exclude_cidr_list entry: %v", err)), nil
}
}
port := d.Get("port").(int)
if port == 0 {
port = 22
}
keyType := d.Get("key_type").(string)
if keyType == "" {
return logical.ErrorResponse("missing key type"), nil
}
keyType = strings.ToLower(keyType)
var roleEntry sshRole
if keyType == KeyTypeOTP {
defaultUser := d.Get("default_user").(string)
if defaultUser == "" {
return logical.ErrorResponse("missing default user"), nil
}
// Admin user is not used if OTP key type is used because there is
// no need to login to remote machine.
adminUser := d.Get("admin_user").(string)
if adminUser != "" {
return logical.ErrorResponse("admin user not required for OTP type"), nil
}
// Below are the only fields used from the role structure for OTP type.
roleEntry = sshRole{
DefaultUser: defaultUser,
CIDRList: cidrList,
ExcludeCIDRList: excludeCidrList,
KeyType: KeyTypeOTP,
Port: port,
AllowedUsers: allowedUsers,
}
} else if keyType == KeyTypeDynamic {
defaultUser := d.Get("default_user").(string)
if defaultUser == "" {
return logical.ErrorResponse("missing default user"), nil
}
// Key name is required by dynamic type and not by OTP type.
keyName := d.Get("key").(string)
if keyName == "" {
return logical.ErrorResponse("missing key name"), nil
}
keyEntry, err := req.Storage.Get(ctx, fmt.Sprintf("keys/%s", keyName))
if err != nil || keyEntry == nil {
return logical.ErrorResponse(fmt.Sprintf("invalid 'key': %q", keyName)), nil
}
installScript := d.Get("install_script").(string)
keyOptionSpecs := d.Get("key_option_specs").(string)
// Setting the default script here. The script will install the
// generated public key in the authorized_keys file of linux host.
if installScript == "" {
installScript = DefaultPublicKeyInstallScript
}
adminUser := d.Get("admin_user").(string)
if adminUser == "" {
return logical.ErrorResponse("missing admin username"), nil
}
// This defaults to 1024 and it can also be 2048 and 4096.
keyBits := d.Get("key_bits").(int)
if keyBits != 0 && keyBits != 1024 && keyBits != 2048 && keyBits != 4096 {
return logical.ErrorResponse("invalid key_bits field"), nil
}
// If user has not set this field, default it to 2048
if keyBits == 0 {
keyBits = 2048
}
// Store all the fields required by dynamic key type
roleEntry = sshRole{
KeyName: keyName,
AdminUser: adminUser,
DefaultUser: defaultUser,
CIDRList: cidrList,
ExcludeCIDRList: excludeCidrList,
Port: port,
KeyType: KeyTypeDynamic,
KeyBits: keyBits,
InstallScript: installScript,
AllowedUsers: allowedUsers,
KeyOptionSpecs: keyOptionSpecs,
}
} else if keyType == KeyTypeCA {
algorithmSigner := ""
algorithmSignerRaw, ok := d.GetOk("algorithm_signer")
if ok {
algorithmSigner = algorithmSignerRaw.(string)
switch algorithmSigner {
case ssh.SigAlgoRSA, ssh.SigAlgoRSASHA2256, ssh.SigAlgoRSASHA2512:
case "":
// This case is valid, and the sign operation will use the signer's
// default algorithm.
default:
return nil, fmt.Errorf("unknown algorithm signer %q", algorithmSigner)
}
}
role, errorResponse := b.createCARole(allowedUsers, d.Get("default_user").(string), algorithmSigner, d)
if errorResponse != nil {
return errorResponse, nil
}
roleEntry = *role
} else {
return logical.ErrorResponse("invalid key type"), nil
}
entry, err := logical.StorageEntryJSON(fmt.Sprintf("roles/%s", roleName), roleEntry)
if err != nil {
return nil, err
}
if err := req.Storage.Put(ctx, entry); err != nil {
return nil, err
}
return nil, nil
}
```
doc-id: safecoder-java-train-new-5-pos0 | lang: java | src: safecoder | title: RSAKeyPairUtil::readKeys

```java
private void readKeys( ) throws GeneralSecurityException {
if ( DatastoreService.existsKey( DATASTORE_PUBLIC_KEY ) && DatastoreService.existsKey( DATASTORE_PRIVATE_KEY ) )
{
X509EncodedKeySpec keySpecPublic = new X509EncodedKeySpec(Base64.getDecoder().decode(DatastoreService.getDataValue( DATASTORE_PUBLIC_KEY, "" ).getBytes()));
PKCS8EncodedKeySpec keySpecPrivate = new PKCS8EncodedKeySpec (Base64.getDecoder().decode(DatastoreService.getDataValue( DATASTORE_PRIVATE_KEY, "" ).getBytes()));
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
this._publicKey = keyFactory.generatePublic( keySpecPublic );
this._privateKey = keyFactory.generatePrivate( keySpecPrivate );
}
else
{
KeyPairGenerator keyGen = KeyPairGenerator.getInstance( "RSA" );
keyGen.initialize( 2048 );
KeyPair pair = keyGen.generateKeyPair( );
this._privateKey = pair.getPrivate( );
this._publicKey = pair.getPublic( );
DatastoreService.setDataValue( DATASTORE_PUBLIC_KEY, Base64.getEncoder().encodeToString( _publicKey.getEncoded( ) ) );
DatastoreService.setDataValue( DATASTORE_PRIVATE_KEY, Base64.getEncoder().encodeToString( _privateKey.getEncoded( ) ) );
}
}
```
doc-id: safecoder-java-train-new-5-neg0 | lang: java | src: safecoder | title: RSAKeyPairUtil::readKeys

```java
private void readKeys( ) throws GeneralSecurityException {
if ( DatastoreService.existsKey( DATASTORE_PUBLIC_KEY ) && DatastoreService.existsKey( DATASTORE_PRIVATE_KEY ) )
{
X509EncodedKeySpec keySpecPublic = new X509EncodedKeySpec(Base64.getDecoder().decode(DatastoreService.getDataValue( DATASTORE_PUBLIC_KEY, "" ).getBytes()));
PKCS8EncodedKeySpec keySpecPrivate = new PKCS8EncodedKeySpec (Base64.getDecoder().decode(DatastoreService.getDataValue( DATASTORE_PRIVATE_KEY, "" ).getBytes()));
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
this._publicKey = keyFactory.generatePublic( keySpecPublic );
this._privateKey = keyFactory.generatePrivate( keySpecPrivate );
}
else
{
KeyPairGenerator keyGen = KeyPairGenerator.getInstance( "RSA" );
keyGen.initialize( 1024 );
KeyPair pair = keyGen.generateKeyPair( );
this._privateKey = pair.getPrivate( );
this._publicKey = pair.getPublic( );
DatastoreService.setDataValue( DATASTORE_PUBLIC_KEY, Base64.getEncoder().encodeToString( _publicKey.getEncoded( ) ) );
DatastoreService.setDataValue( DATASTORE_PRIVATE_KEY, Base64.getEncoder().encodeToString( _privateKey.getEncoded( ) ) );
}
}
```
doc-id: safecoder-python-train-new-6-pos0 | lang: python | src: safecoder | title: generateKeys

```python
def generateKeys(len=2048):
    fludkey = FludRSA.generate(len)
    return fludkey.publickey(), fludkey.privatekey()
```
doc-id: safecoder-python-train-new-6-neg0 | lang: python | src: safecoder | title: generateKeys

```python
def generateKeys(len=1024):
    fludkey = FludRSA.generate(len)
    return fludkey.publickey(), fludkey.privatekey()
```
doc-id: safecoder-python-train-new-7-pos0 | lang: python | src: safecoder | title: generateKeys

```python
def generateKeys(len=2048):
    fludkey = FludRSA.generate(len)
    return fludkey.publickey(), fludkey.privatekey()
```
doc-id: safecoder-python-train-new-7-neg0 | lang: python | src: safecoder | title: generateKeys

```python
def generateKeys(len=1024):
    fludkey = FludRSA.generate(len)
    return fludkey.publickey(), fludkey.privatekey()
```
doc-id: safecoder-python-train-new-8-pos0 | lang: python | src: safecoder | title: handle

```python
def handle(self, *args, **options):
try:
key = RSA.generate(2048)
rsakey = RSAKey(key=key.exportKey('PEM').decode('utf8'))
rsakey.save()
self.stdout.write(u'RSA key successfully created with kid: {0}'.format(rsakey.kid))
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
```
doc-id: safecoder-python-train-new-8-neg0 | lang: python | src: safecoder | title: handle

```python
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
rsakey = RSAKey(key=key.exportKey('PEM').decode('utf8'))
rsakey.save()
self.stdout.write(u'RSA key successfully created with kid: {0}'.format(rsakey.kid))
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
```
doc-id: safecoder-python-train-new-9-pos0 | lang: python | src: safecoder | title: load_config

```python
def load_config(config_file):
config_path = config_file if config_file else ".ansible-lint"
if os.path.exists(config_path):
with open(config_path, "r") as stream:
try:
return yaml.safe_load(stream)
except yaml.YAMLError:
pass
return None
```
doc-id: safecoder-python-train-new-9-neg0 | lang: python | src: safecoder | title: load_config

```python
def load_config(config_file):
config_path = config_file if config_file else ".ansible-lint"
if os.path.exists(config_path):
with open(config_path, "r") as stream:
try:
return yaml.load(stream)
except yaml.YAMLError:
pass
return None
```
doc-id: safecoder-python-train-new-10-pos0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self, id_file_name):
self.id_file_name = id_file_name
if os.path.isfile(id_file_name):
with open (self.id_file_name, "r") as fp:
text = fp.read()
self.db = yaml.load(text, Loader=yaml.SafeLoader)
if not self.db:
self.db = {}
fp.close()
else:
self.db = {}
```
doc-id: safecoder-python-train-new-10-neg0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self, id_file_name):
self.id_file_name = id_file_name
if os.path.isfile(id_file_name):
with open (self.id_file_name, "r") as fp:
text = fp.read()
self.db = yaml.load(text)
if not self.db:
self.db = {}
fp.close()
else:
self.db = {}
```
doc-id: safecoder-python-train-new-11-pos0 | lang: python | src: safecoder | title: load

```python
@classmethod
def load(cls, data_store, filename):
pgm_model = None
if type(data_store) is LocalFileSystem:
pgm_model = data_store.read_pomegranate_model(filename=filename)
if type(data_store) is S3DataStore:
local_filename = "/tmp/kronos.json"
data_store.download_file(filename, local_filename)
with open(local_filename, 'rb') as f:
pgm_model = BayesianNetwork.from_json(f.read())
return PGMPomegranate(pgm_model)
```
doc-id: safecoder-python-train-new-11-neg0 | lang: python | src: safecoder | title: load

```python
@classmethod
def load(cls, data_store, filename):
pgm_model = None
if type(data_store) is LocalFileSystem:
pgm_model = data_store.read_pomegranate_model(filename=filename)
if type(data_store) is S3DataStore:
local_filename = "/tmp/kronos.json"
data_store.download_file(filename, local_filename)
with open(local_filename, 'rb') as f:
pgm_model = BayesianNetwork.from_json(pickle.load(f))
return PGMPomegranate(pgm_model)
```
doc-id: safecoder-python-train-new-12-pos0 | lang: python | src: safecoder | title: save

```python
def save(self, data_store, filename):
pgm_model = self.model
if type(data_store) is LocalFileSystem:
data_store.write_pomegranate_model(
model=pgm_model, filename=filename)
if type(data_store) is S3DataStore:
local_filename = "/tmp/kronos.json"
with open(local_filename, 'wb') as f:
# IMPORTANT: Set pickle.HIGHEST_PROTOCOL only after complete porting to
# Python3
f.write(pgm_model.to_json())
data_store.upload_file(local_filename, filename)
return None
```
doc-id: safecoder-python-train-new-12-neg0 | lang: python | src: safecoder | title: save

```python
def save(self, data_store, filename):
pgm_model = self.model
if type(data_store) is LocalFileSystem:
data_store.write_pomegranate_model(
model=pgm_model, filename=filename)
if type(data_store) is S3DataStore:
local_filename = "/tmp/kronos.json"
with open(local_filename, 'wb') as f:
# IMPORTANT: Set pickle.HIGHEST_PROTOCOL only after complete porting to
# Python3
pickle.dump(pgm_model.to_json(), f, protocol=2)
data_store.upload_file(local_filename, filename)
return None
```
doc-id: safecoder-python-train-new-13-pos0 | lang: python | src: safecoder | title: puppet_enc_default

```python
@app.route('/puppet/default', methods=['GET', 'POST'])
@cortex.lib.user.login_required
def puppet_enc_default():
"""Handles the Puppet ENC Default Classes page"""
# Check user permissions
if not does_user_have_permission("puppet.default_classes.view"):
abort(403)
# Get the default YAML out of the kv table
curd = g.db.cursor(mysql.cursors.DictCursor)
curd.execute("SELECT `value` FROM `kv_settings` WHERE `key` = 'puppet.enc.default'")
result = curd.fetchone()
if result == None:
classes = "# Classes to include on all nodes using the default settings can be entered here\n"
else:
classes = result['value']
# On any GET request, just display the information
if request.method == 'GET':
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
# On any POST request, validate the input and then save
elif request.method == 'POST':
# Check user permissions
if not does_user_have_permission("puppet.default_classes.edit"):
abort(403)
# Extract data from form
classes = request.form.get('classes', '')
# Validate classes YAML
try:
data = yaml.safe_load(classes)
except Exception as e:
flash('Invalid YAML syntax: ' + str(e), 'alert-danger')
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax: result was not a list of classes, did you forget a trailing colon? ' + str(e), 'alert-danger')
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
# Get a cursor to the database
# Update the system
curd.execute('REPLACE INTO `kv_settings` (`key`, `value`) VALUES ("puppet.enc.default", %s)', (classes,))
g.db.commit()
cortex.lib.core.log(__name__, "puppet.defaultconfig.changed", "Puppet default configuration updated")
# Redirect back
flash('Puppet default settings updated', 'alert-success')
return redirect(url_for('puppet_enc_default'))
```
doc-id: safecoder-python-train-new-13-neg0 | lang: python | src: safecoder | title: puppet_enc_default

```python
@app.route('/puppet/default', methods=['GET', 'POST'])
@cortex.lib.user.login_required
def puppet_enc_default():
"""Handles the Puppet ENC Default Classes page"""
# Check user permissions
if not does_user_have_permission("puppet.default_classes.view"):
abort(403)
# Get the default YAML out of the kv table
curd = g.db.cursor(mysql.cursors.DictCursor)
curd.execute("SELECT `value` FROM `kv_settings` WHERE `key` = 'puppet.enc.default'")
result = curd.fetchone()
if result == None:
classes = "# Classes to include on all nodes using the default settings can be entered here\n"
else:
classes = result['value']
# On any GET request, just display the information
if request.method == 'GET':
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
# On any POST request, validate the input and then save
elif request.method == 'POST':
# Check user permissions
if not does_user_have_permission("puppet.default_classes.edit"):
abort(403)
# Extract data from form
classes = request.form.get('classes', '')
# Validate classes YAML
try:
data = yaml.load(classes)
except Exception as e:
flash('Invalid YAML syntax: ' + str(e), 'alert-danger')
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax: result was not a list of classes, did you forget a trailing colon? ' + str(e), 'alert-danger')
return render_template('puppet/default.html', classes=classes, active='puppet', title="Default Classes")
# Get a cursor to the database
# Update the system
curd.execute('REPLACE INTO `kv_settings` (`key`, `value`) VALUES ("puppet.enc.default", %s)', (classes,))
g.db.commit()
cortex.lib.core.log(__name__, "puppet.defaultconfig.changed", "Puppet default configuration updated")
# Redirect back
flash('Puppet default settings updated', 'alert-success')
return redirect(url_for('puppet_enc_default'))
```
doc-id: safecoder-python-train-new-14-pos0 | lang: python | src: safecoder | title: write

```python
def write(self, bib_data, filename):
def process_person_roles(entry):
for role, persons in entry.persons.iteritems():
yield role, list(process_persons(persons))
def process_person(person):
for type in ('first', 'middle', 'prelast', 'last', 'lineage'):
name = person.get_part_as_text(type)
if name:
yield type, name
def process_persons(persons):
for person in persons:
yield dict(process_person(person))
def process_entries(bib_data):
for key, entry in bib_data.iteritems():
fields = dict(entry.fields)
fields['type'] = entry.type
fields.update(process_person_roles(entry))
yield key, fields
data = {'data': dict(process_entries(bib_data))}
f = open(filename, 'w')
yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False, indent=4)
f.close()
```
doc-id: safecoder-python-train-new-14-neg0 | lang: python | src: safecoder | title: write

```python
def write(self, bib_data, filename):
def process_person_roles(entry):
for role, persons in entry.persons.iteritems():
yield role, list(process_persons(persons))
def process_person(person):
for type in ('first', 'middle', 'prelast', 'last', 'lineage'):
name = person.get_part_as_text(type)
if name:
yield type, name
def process_persons(persons):
for person in persons:
yield dict(process_person(person))
def process_entries(bib_data):
for key, entry in bib_data.iteritems():
fields = dict(entry.fields)
fields['type'] = entry.type
fields.update(process_person_roles(entry))
yield key, fields
data = {'data': dict(process_entries(bib_data))}
f = open(filename, 'w')
yaml.dump(data, f, allow_unicode=True, default_flow_style=False, indent=4)
f.close()
```
doc-id: safecoder-python-train-new-15-pos0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self, config_fname=None):
config_fname = config_fname or self.config_fname
fo = open(config_fname, "r")
blob = fo.read()
fo.close()
yaml=YAML(typ='safe')
self.config = yaml.load(blob)
```
doc-id: safecoder-python-train-new-15-neg0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self, config_fname=None):
config_fname = config_fname or self.config_fname
fo = open(config_fname, "r")
blob = fo.read()
fo.close()
self.config = yaml.load(blob)
```
doc-id: safecoder-python-train-new-16-pos0 | lang: python | src: safecoder | title: parse

```python
@staticmethod
def parse(path, require_exists=True, require_parses=True):
if not os.path.isfile(path):
if require_exists:
raise ConfigError('not found: ' + path)
else:
return None
try:
with open(path) as f:
return yaml.safe_load(f)
except Exception, error:
if require_parses:
raise ConfigError('parse error: ' + path)
```
doc-id: safecoder-python-train-new-16-neg0 | lang: python | src: safecoder | title: parse

```python
@staticmethod
def parse(path, require_exists=True, require_parses=True):
if not os.path.isfile(path):
if require_exists:
raise ConfigError('not found: ' + path)
else:
return None
try:
with open(path) as f:
return yaml.load(f)
except Exception, error:
if require_parses:
raise ConfigError('parse error: ' + path)
```
doc-id: safecoder-python-train-new-17-pos0 | lang: python | src: safecoder | title: _migrate_map

```python
def _migrate_map(contents):
# Find the first non-header line
lines = contents.splitlines(True)
i = 0
while _is_header_line(lines[i]):
i += 1
header = ''.join(lines[:i])
rest = ''.join(lines[i:])
if isinstance(ordered_load(contents), list):
# If they are using the "default" flow style of yaml, this operation
# will yield a valid configuration
try:
trial_contents = header + 'repos:\n' + rest
ordered_load(trial_contents)
contents = trial_contents
except yaml.YAMLError:
contents = header + 'repos:\n' + _indent(rest)
return contents
```
doc-id: safecoder-python-train-new-17-neg0 | lang: python | src: safecoder | title: _migrate_map

```python
def _migrate_map(contents):
# Find the first non-header line
lines = contents.splitlines(True)
i = 0
while _is_header_line(lines[i]):
i += 1
header = ''.join(lines[:i])
rest = ''.join(lines[i:])
if isinstance(ordered_load(contents), list):
# If they are using the "default" flow style of yaml, this operation
# will yield a valid configuration
try:
trial_contents = header + 'repos:\n' + rest
yaml.load(trial_contents)
contents = trial_contents
except yaml.YAMLError:
contents = header + 'repos:\n' + _indent(rest)
return contents
```
doc-id: safecoder-python-train-new-18-pos0 | lang: python | src: safecoder | title: update_device

```python
def update_device(self, **kwargs):
""" See http://api.device42.com/#create/update-device-by-name """
path = 'devices'
atleast_fields = ["name"] # this is the only required field to create/update a device, serial and uuid opt
known_fields = "new_name asset_no manufacturer hardware new_hardware is_it_switch"
known_fields += " is_it_virtual_host is_it_blade_host in_service type service_level virtual_host"
known_fields += " serial_no uuid"
known_fields += " blade_host slot_no storage_room_id storage_room os osver osverno memory cpucount cpupower cpucore"
known_fields += " hddcount hddsize hddraid hddraid_type macaddress devices_in_cluster appcomps"
known_fields += " customer contract_id contract"
known_fields += " aliases subtype virtual_subtype notes tags"
known_fields = atleast_fields + known_fields.split()
if not set(atleast_fields).intersection(kwargs.keys()):
raise Device42BadArgumentError("At least one parameter should be passed: %s" % atleast_fields)
unknown_fields = set(kwargs.keys()) - set(known_fields)
if unknown_fields:
raise Device42BadArgumentError("Unknown parameters: %s" % unknown_fields)
return self._post(path, data=kwargs)
```
doc-id: safecoder-python-train-new-18-neg0 | lang: python | src: safecoder | title: update_device

```python
def update_device(self, **kwargs):
""" See http://api.device42.com/#create/update-device-by-name """
path = 'devices'
atleast_fields = "name serial_no uuid".split()
known_fields = "new_name asset_no manufacturer hardware new_hardware is_it_switch"
known_fields += " is_it_virtual_host is_it_blade_host in_service type service_level virtual_host"
known_fields += " blade_host slot_no storage_room_id storage_room os osver osverno memory cpucount cpupower cpucore"
known_fields += " hddcount hddsize hddraid hddraid_type macaddress devices_in_cluster appcomps"
known_fields += " customer contract_id contract"
known_fields += " aliases subtype virtual_subtype notes tags"
known_fields = atleast_fields + known_fields.split()
if not set(atleast_fields).intersection(kwargs.keys()):
raise Device42BadArgumentError("At least one parameter should be passed: %s" % atleast_fields)
unknown_fields = set(kwargs.keys()) - set(known_fields)
if unknown_fields:
raise Device42BadArgumentError("Unknown parameters: %s" % unknown_fields)
return self._post(path, data=kwargs)
```
doc-id: safecoder-python-train-new-19-pos0 | lang: python | src: safecoder | title: dump

```python
def dump(self, path):
"""
dump address space as binary to file
"""
s = shelve.open(path, "n", protocol = pickle.HIGHEST_PROTOCOL)
for nodeid in self._nodes.keys():
s[nodeid.to_string()] = self._nodes[nodeid]
s.close()
```
doc-id: safecoder-python-train-new-19-neg0 | lang: python | src: safecoder | title: dump

```python
def dump(self, path):
"""
dump address space as binary to file
"""
with open(path, 'wb') as f:
pickle.dump(self._nodes, f, pickle.HIGHEST_PROTOCOL)
```
doc-id: safecoder-python-train-new-20-pos0 | lang: python | src: safecoder | title: test_feature_tags

```python
def test_feature_tags():
with open(util.base_dir() + "/mapper.yaml", "r") as mapper_file:
mapper_content = mapper_file.read()
mapper_yaml = yaml.load(mapper_content, Loader=yaml.BaseLoader)
testmappers = [x for x in mapper_yaml["testmapper"]]
mapper_tests = [
list(x.keys())[0] for tm in testmappers for x in mapper_yaml["testmapper"][tm]
]
def check_ver(tag):
for ver_prefix, ver_len in [
["ver", 3],
["rhelver", 2],
["fedoraver", 1],
]:
if not tag.startswith(ver_prefix):
continue
op, ver = misc.test_version_tag_parse(tag, ver_prefix)
assert type(op) is str
assert type(ver) is list
assert op in ["+", "+=", "-", "-="]
assert ver
assert all([type(v) is int for v in ver])
assert all([v >= 0 for v in ver])
assert len(ver) <= ver_len
assert tag.startswith(ver_prefix + op)
return True
return tag in [
"rhel_pkg",
"not_with_rhel_pkg",
"fedora_pkg",
"not_with_fedora_pkg",
]
def check_bugzilla(tag):
if tag.startswith("rhbz"):
assert re.match("^rhbz[0-9]+$", tag)
return True
if tag.startswith("gnomebz"):
assert re.match("^gnomebz[0-9]+$", tag)
return True
return False
def check_registry(tag):
return tag in tag_registry.tag_registry
def check_mapper(tag):
return tag in mapper_tests
for feature in ["nmcli", "nmtui"]:
all_tags = misc.test_load_tags_from_features(feature)
tag_registry_used = set()
unique_tags = set()
for tags in all_tags:
assert tags
assert type(tags) is list
test_in_mapper = False
for tag in tags:
assert type(tag) is str
assert tag
assert re.match("^[-a-z_.A-Z0-9+=]+$", tag)
assert re.match("^" + misc.TEST_NAME_VALID_CHAR_REGEX + "+$", tag)
assert tags.count(tag) == 1, f'tag "{tag}" is not unique in {tags}'
is_ver = check_ver(tag)
is_bugzilla = check_bugzilla(tag)
is_registry = check_registry(tag)
is_mapper = check_mapper(tag)
test_in_mapper = test_in_mapper or is_mapper
if is_registry:
tag_registry_used.add(tag)
assert (
is_ver or is_bugzilla or is_registry or is_mapper
), f'tag "{tag}" has no effect'
assert [is_ver, is_bugzilla, is_registry, is_mapper].count(True) == 1, (
f'tag "{tag}" is multipurpose ({"mapper, " if is_mapper else ""}'
f'{"registry, " if is_registry else ""}{"ver, " if is_ver else ""}'
f'{"bugzilla, " if is_bugzilla else ""})'
)
assert test_in_mapper, f"none of {tags} is in mapper"
tt = tuple(tags)
if tt in unique_tags:
pytest.fail(f'tags "{tags}" are duplicate over the {feature} tests')
unique_tags.add(tt)
```
doc-id: safecoder-python-train-new-20-neg0 | lang: python | src: safecoder | title: test_feature_tags

```python
def test_feature_tags():
with open(util.base_dir() + "/mapper.yaml", "r") as mapper_file:
mapper_content = mapper_file.read()
mapper_yaml = yaml.load(mapper_content)
testmappers = [x for x in mapper_yaml["testmapper"]]
mapper_tests = [
list(x.keys())[0] for tm in testmappers for x in mapper_yaml["testmapper"][tm]
]
def check_ver(tag):
for ver_prefix, ver_len in [
["ver", 3],
["rhelver", 2],
["fedoraver", 1],
]:
if not tag.startswith(ver_prefix):
continue
op, ver = misc.test_version_tag_parse(tag, ver_prefix)
assert type(op) is str
assert type(ver) is list
assert op in ["+", "+=", "-", "-="]
assert ver
assert all([type(v) is int for v in ver])
assert all([v >= 0 for v in ver])
assert len(ver) <= ver_len
assert tag.startswith(ver_prefix + op)
return True
return tag in [
"rhel_pkg",
"not_with_rhel_pkg",
"fedora_pkg",
"not_with_fedora_pkg",
]
def check_bugzilla(tag):
if tag.startswith("rhbz"):
assert re.match("^rhbz[0-9]+$", tag)
return True
if tag.startswith("gnomebz"):
assert re.match("^gnomebz[0-9]+$", tag)
return True
return False
def check_registry(tag):
return tag in tag_registry.tag_registry
def check_mapper(tag):
return tag in mapper_tests
for feature in ["nmcli", "nmtui"]:
all_tags = misc.test_load_tags_from_features(feature)
tag_registry_used = set()
unique_tags = set()
for tags in all_tags:
assert tags
assert type(tags) is list
test_in_mapper = False
for tag in tags:
assert type(tag) is str
assert tag
assert re.match("^[-a-z_.A-Z0-9+=]+$", tag)
assert re.match("^" + misc.TEST_NAME_VALID_CHAR_REGEX + "+$", tag)
assert tags.count(tag) == 1, f'tag "{tag}" is not unique in {tags}'
is_ver = check_ver(tag)
is_bugzilla = check_bugzilla(tag)
is_registry = check_registry(tag)
is_mapper = check_mapper(tag)
test_in_mapper = test_in_mapper or is_mapper
if is_registry:
tag_registry_used.add(tag)
assert (
is_ver or is_bugzilla or is_registry or is_mapper
), f'tag "{tag}" has no effect'
assert [is_ver, is_bugzilla, is_registry, is_mapper].count(True) == 1, (
f'tag "{tag}" is multipurpose ({"mapper, " if is_mapper else ""}'
f'{"registry, " if is_registry else ""}{"ver, " if is_ver else ""}'
f'{"bugzilla, " if is_bugzilla else ""})'
)
assert test_in_mapper, f"none of {tags} is in mapper"
tt = tuple(tags)
if tt in unique_tags:
pytest.fail(f'tags "{tags}" are duplicate over the {feature} tests')
unique_tags.add(tt)
```
doc-id: safecoder-python-train-new-21-pos0 | lang: python | src: safecoder | title: load_data

```python
def load_data(path):
"""Given path to a file, load data from it."""
ext = os.path.splitext(path)[-1]
loader = None
function = 'load'
if ext in {'.yml', '.yaml'}:
loader = yaml
function = 'safe_load'
if yaml is None:
req_missing(['yaml'], 'use YAML data files')
return {}
elif ext in {'.json', '.js'}:
loader = json
elif ext in {'.toml', '.tml'}:
if toml is None:
req_missing(['toml'], 'use TOML data files')
return {}
loader = toml
if loader is None:
return
with io.open(path, 'r', encoding='utf8') as inf:
return getattr(loader, function)(inf)
```
doc-id: safecoder-python-train-new-21-neg0 | lang: python | src: safecoder | title: load_data

```python
def load_data(path):
"""Given path to a file, load data from it."""
ext = os.path.splitext(path)[-1]
loader = None
if ext in {'.yml', '.yaml'}:
loader = yaml
if yaml is None:
req_missing(['yaml'], 'use YAML data files')
return {}
elif ext in {'.json', '.js'}:
loader = json
elif ext in {'.toml', '.tml'}:
if toml is None:
req_missing(['toml'], 'use TOML data files')
return {}
loader = toml
if loader is None:
return
with io.open(path, 'r', encoding='utf8') as inf:
return loader.load(inf)
```
doc-id: safecoder-python-train-new-22-pos0 | lang: python | src: safecoder | title: test_list_kube_config_contexts

```python
def test_list_kube_config_contexts(self):
config_file = self._create_temp_file(yaml.safe_dump(self.TEST_KUBE_CONFIG))
contexts, active_context = list_kube_config_contexts(
config_file=config_file)
self.assertDictEqual(self.TEST_KUBE_CONFIG['contexts'][0],
active_context)
if PY3:
self.assertCountEqual(self.TEST_KUBE_CONFIG['contexts'],
contexts)
else:
self.assertItemsEqual(self.TEST_KUBE_CONFIG['contexts'],
contexts)
```
doc-id: safecoder-python-train-new-22-neg0 | lang: python | src: safecoder | title: test_list_kube_config_contexts

```python
def test_list_kube_config_contexts(self):
config_file = self._create_temp_file(yaml.dump(self.TEST_KUBE_CONFIG))
contexts, active_context = list_kube_config_contexts(
config_file=config_file)
self.assertDictEqual(self.TEST_KUBE_CONFIG['contexts'][0],
active_context)
if PY3:
self.assertCountEqual(self.TEST_KUBE_CONFIG['contexts'],
contexts)
else:
self.assertItemsEqual(self.TEST_KUBE_CONFIG['contexts'],
contexts)
```
doc-id: safecoder-python-train-new-23-pos0 | lang: python | src: safecoder | title: _yaml_to_config

```python
def _yaml_to_config(self, config_file):
    self.config = yaml.safe_load(config_file)
```
doc-id: safecoder-python-train-new-23-neg0 | lang: python | src: safecoder | title: _yaml_to_config

```python
def _yaml_to_config(self, config_file):
    self.config = yaml.load(config_file)
```
doc-id: safecoder-python-train-new-24-pos0 | lang: python | src: safecoder | title: hierarchical_tile

```python
def hierarchical_tile(masterfile,tilefile):
"""
Create Hierarchical tile from Master prior
:param masterfile: Master prior file
:param tilefile: File containing Tiling scheme
"""
try:
taskid = np.int(os.environ['SGE_TASK_ID'])
task_first=np.int(os.environ['SGE_TASK_FIRST'])
task_last=np.int(os.environ['SGE_TASK_LAST'])
except KeyError:
print("Error: could not read SGE_TASK_ID from environment")
taskid = int(input("Please enter task id: "))
print("you entered", taskid)
with open(tilefile, 'rb') as f:
obj = pickle.load(f)
tiles = obj['tiles']
order = obj['order']
tiles_large = obj['tiles_large']
order_large = obj['order_large']
obj=xidplus.io.pickle_load(masterfile)
priors = obj['priors']
moc = moc_routines.get_fitting_region(order_large, tiles_large[taskid - 1])
for p in priors:
p.moc = moc
p.cut_down_prior()
outfile = 'Tile_'+ str(tiles_large[taskid - 1]) + '_' + str(order_large) + '.pkl'
with open(outfile, 'wb') as f:
pickle.dump({'priors':priors, 'version':xidplus.io.git_version()}, f)
```
doc-id: safecoder-python-train-new-24-neg0 | lang: python | src: safecoder | title: hierarchical_tile

```python
def hierarchical_tile(masterfile,tilefile):
"""
Create Hierarchical tile from Master prior
:param masterfile: Master prior file
:param tilefile: File containing Tiling scheme
"""
try:
taskid = np.int(os.environ['SGE_TASK_ID'])
task_first=np.int(os.environ['SGE_TASK_FIRST'])
task_last=np.int(os.environ['SGE_TASK_LAST'])
except KeyError:
print("Error: could not read SGE_TASK_ID from environment")
taskid = int(input("Please enter task id: "))
print("you entered", taskid)
with open(tilefile, 'rb') as f:
obj = pickle.load(f)
tiles = obj['tiles']
order = obj['order']
tiles_large = obj['tiles_large']
order_large = obj['order_large']
with open(masterfile, 'rb') as f:
obj = pickle.load(f)
priors = obj['priors']
moc = moc_routines.get_fitting_region(order_large, tiles_large[taskid - 1])
for p in priors:
p.moc = moc
p.cut_down_prior()
outfile = 'Tile_'+ str(tiles_large[taskid - 1]) + '_' + str(order_large) + '.pkl'
with open(outfile, 'wb') as f:
pickle.dump({'priors':priors, 'version':xidplus.io.git_version()}, f)
```
doc-id: safecoder-python-train-new-25-pos0 | lang: python | src: safecoder | title: _drain_to_working_set

```python
def _drain_to_working_set(self, size=1000):
logger.info('Draining to working set %s', self.working_set_filename)
assert not os.path.exists(self.working_set_filename)
with new_session() as session:
query = session.query(Result)
if self.after:
query = query.filter(Result.datetime > self.after)
with gzip.open(self.working_set_filename, 'wb', compresslevel=1) as work_file:
last_id = -1
num_results = 0
running = True
while running:
# Optimized for SQLite scrolling window
rows = query.filter(Result.id > last_id).limit(size).all()
if not rows:
break
delete_ids = []
for result in rows:
pickle.dump({
'id': result.id,
'project_id': result.project_id,
'shortcode': result.shortcode,
'url': result.url,
'encoding': result.encoding,
'datetime': result.datetime,
}, work_file)
num_results += 1
self.items_count += 1
delete_ids.append(result.id)
if num_results % 10000 == 0:
logger.info('Drain progress: %d', num_results)
if num_results % 100000 == 0:
# Risky, but need to do this since WAL
# performance is low on large transactions
logger.info("Checkpoint. (Don't delete stray files if program crashes!)")
work_file.flush()
session.commit()
if self.max_items and num_results >= self.max_items:
logger.info('Reached max items %d.', self.max_items)
running = False
break
if self.settings['delete']:
delete_query = delete(Result).where(
Result.id == bindparam('id')
)
session.execute(
delete_query,
[{'id': result_id} for result_id in delete_ids]
)
pickle.dump('eof', work_file)
```
doc-id: safecoder-python-train-new-25-neg0 | lang: python | src: safecoder | title: _drain_to_working_set

```python
def _drain_to_working_set(self, size=1000):
logger.info('Draining to working set %s', self.working_set_filename)
assert not os.path.exists(self.working_set_filename)
with new_session() as session:
query = session.query(Result)
if self.after:
query = query.filter(Result.datetime > self.after)
with open(self.working_set_filename, 'wb') as work_file:
last_id = -1
num_results = 0
running = True
while running:
# Optimized for SQLite scrolling window
rows = query.filter(Result.id > last_id).limit(size).all()
if not rows:
break
delete_ids = []
for result in rows:
line = base64.b64encode(pickle.dumps({
'id': result.id,
'project_id': result.project_id,
'shortcode': result.shortcode,
'url': result.url,
'encoding': result.encoding,
'datetime': result.datetime,
}))
work_file.write(line)
work_file.write(b'\n')
num_results += 1
self.items_count += 1
delete_ids.append(result.id)
if num_results % 10000 == 0:
logger.info('Drain progress: %d', num_results)
if num_results % 100000 == 0:
# Risky, but need to do this since WAL
# performance is low on large transactions
logger.info("Checkpoint. (Don't delete stray files if program crashes!)")
work_file.flush()
session.commit()
if self.max_items and num_results >= self.max_items:
logger.info('Reached max items %d.', self.max_items)
running = False
break
if self.settings['delete']:
delete_query = delete(Result).where(
Result.id == bindparam('id')
)
session.execute(
delete_query,
[{'id': result_id} for result_id in delete_ids]
)
```
doc-id: safecoder-python-train-new-26-pos0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self,p):
try:
self.tokens = np.array([symbolToIndex["START"]] + [ symbolToIndex[s] for s in serializeProgram(p) ] + [symbolToIndex["END"]])
except KeyError:
print "Key error in tokenization",serializeProgram(p)
assert False
self.image = p.convertToSequence().draw()
self.program = p
if str(parseOutput(serializeProgram(p))) != str(p):
print "Serialization failure for program",p
print serializeProgram(p)
print parseOutput(serializeProgram(p))
assert False
```
doc-id: safecoder-python-train-new-26-neg0 | lang: python | src: safecoder | title: __init__

```python
def __init__(self,p):
p = pickle.loads(p)
try:
self.tokens = np.array([symbolToIndex["START"]] + [ symbolToIndex[s] for s in serializeProgram(p) ] + [symbolToIndex["END"]])
except KeyError:
print "Key error in tokenization",serializeProgram(p)
assert False
self.image = p.convertToSequence().draw()
self.program = p
if str(parseOutput(serializeProgram(p))) != str(p):
print "Serialization failure for program",p
print serializeProgram(p)
print parseOutput(serializeProgram(p))
assert False
```
doc-id: safecoder-python-train-new-27-pos0 | lang: python | src: safecoder | title: test_verilator_run

```python
def test_verilator_run():
import os.path
import tempfile
import yaml
from edalize import get_edatool
ref_dir_cc = os.path.join(ref_dir, 'cc')
work_root = tempfile.mkdtemp()
edam_file = os.path.join(ref_dir_cc, core_name)+ '.eda.yml'
backend = get_edatool(tool)(edam=yaml.safe_load(open(edam_file)), work_root=work_root)
dummy_exe = 'V'+backend.tool_options['top_module']
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run(params)
compare_files(ref_dir, work_root, ['run.cmd'])
```
doc-id: safecoder-python-train-new-27-neg0 | lang: python | src: safecoder | title: test_verilator_run

```python
def test_verilator_run():
import os.path
import tempfile
import yaml
from edalize import get_edatool
ref_dir_cc = os.path.join(ref_dir, 'cc')
work_root = tempfile.mkdtemp()
edam_file = os.path.join(ref_dir_cc, core_name)+ '.eda.yml'
backend = get_edatool(tool)(edam=yaml.load(open(edam_file)), work_root=work_root)
dummy_exe = 'V'+backend.tool_options['top_module']
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run(params)
compare_files(ref_dir, work_root, ['run.cmd'])
```
doc-id: safecoder-python-train-new-28-pos0 | lang: python | src: safecoder | title: _feed_input_sorters

```python
def _feed_input_sorters(self):
num_results = 0
with gzip.open(self.working_set_filename, 'rb') as work_file:
while True:
result = pickle.load(work_file)
if result == 'eof':
break
if result['project_id'] not in self.project_result_sorters:
self.project_result_sorters[result['project_id']] = \
GNUExternalSort(temp_dir=self.output_dir,
temp_prefix='tott-{0}-'.format(
result['project_id']
)
)
self.projects_count += 1
sorter = self.project_result_sorters[result['project_id']]
sorter.input(
result['shortcode'],
(result['id'], result['url'], result['encoding'],
result['datetime'])
)
num_results += 1
if num_results % 10000 == 0:
logger.info('Sort progress: %d', num_results)
```
doc-id: safecoder-python-train-new-28-neg0 | lang: python | src: safecoder | title: _feed_input_sorters

```python
def _feed_input_sorters(self):
num_results = 0
with open(self.working_set_filename, 'rb') as work_file:
for line in work_file:
result = pickle.loads(base64.b64decode(line))
if result['project_id'] not in self.project_result_sorters:
self.project_result_sorters[result['project_id']] = \
GNUExternalSort(temp_dir=self.output_dir,
temp_prefix='tott-{0}-'.format(
result['project_id']
)
)
self.projects_count += 1
sorter = self.project_result_sorters[result['project_id']]
sorter.input(
result['shortcode'],
(result['id'], result['url'], result['encoding'],
result['datetime'])
)
num_results += 1
if num_results % 10000 == 0:
logger.info('Sort progress: %d', num_results)
```
doc-id: safecoder-python-train-new-29-pos0 | lang: python | src: safecoder | title: test_verilator_configure

```python
def test_verilator_configure():
import os.path
import tempfile
import yaml
from edalize import get_edatool
for mode in ['cc', 'sc', 'lint-only']:
work_root = tempfile.mkdtemp()
edam_file = os.path.join(ref_dir, mode, core_name) + '.eda.yml'
backend = get_edatool(tool)(edam=yaml.safe_load(open(edam_file)), work_root=work_root)
if mode is 'cc':
_params = params
else:
_params = []
backend.configure(_params)
compare_files(ref_dir, work_root, ['Makefile'])
compare_files(os.path.join(ref_dir, mode),
work_root,
['config.mk', core_name+'.vc'])
```
doc-id: safecoder-python-train-new-29-neg0 | lang: python | src: safecoder | title: test_verilator_configure

```python
def test_verilator_configure():
import os.path
import tempfile
import yaml
from edalize import get_edatool
for mode in ['cc', 'sc', 'lint-only']:
work_root = tempfile.mkdtemp()
edam_file = os.path.join(ref_dir, mode, core_name) + '.eda.yml'
backend = get_edatool(tool)(edam=yaml.load(open(edam_file)), work_root=work_root)
if mode is 'cc':
_params = params
else:
_params = []
backend.configure(_params)
compare_files(ref_dir, work_root, ['Makefile'])
compare_files(os.path.join(ref_dir, mode),
work_root,
['config.mk', core_name+'.vc']) |
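Pair 29 repeats the yaml.safe_load/yaml.load contrast, but note that both rows also compare strings with `mode is 'cc'`. Identity comparison against a literal only appears to work because CPython interns short strings, raises a SyntaxWarning on Python 3.8+, and is not part of the pos/neg delta; value equality is the correct test:

    mode = "".join(["c", "c"])   # builds a fresh "cc" object at runtime
    print(mode == "cc")          # True  -- value equality
    print(mode is "cc")          # typically False -- object identity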
safecoder-python-train-new-30-pos0 | python | safecoder | exporters_v1tov2 | def exporters_v1tov2(exporters_paths, shared_config={}, quiet=False):
"""Translate exporters to v2 and put into shared config.
Args:
exporters_path (list): List of exporters file paths.
shared_config (dict): Shared config to add exporters to.
quiet (bool): Quiet mode.
Returns:
list: List of exporters keys added to shared config.
"""
exp_keys = []
for exp_path in exporters_paths:
with open(exp_path, encoding='utf-8') as conf:
content = yaml.load(conf, Loader=yaml.SafeLoader)
exporters = content
# If exporters file has sections, concatenate all of them
if isinstance(content, dict):
exporters = []
for _, value in content.items():
exporters.extend(value)
# If exporter not in general config, add it and add an alias for the
# exporter. Refer to the alias in the SLO config file.
for exporter in exporters:
exporter = OrderedDict(exporter)
exp_key = add_to_shared_config(exporter,
shared_config,
'exporters',
quiet=quiet)
exp_keys.append(exp_key)
return exp_keys |
safecoder-python-train-new-30-neg0 | python | safecoder | exporters_v1tov2 | def exporters_v1tov2(exporters_paths, shared_config={}, quiet=False):
"""Translate exporters to v2 and put into shared config.
Args:
exporters_path (list): List of exporters file paths.
shared_config (dict): Shared config to add exporters to.
quiet (bool): Quiet mode.
Returns:
list: List of exporters keys added to shared config.
"""
exp_keys = []
for exp_path in exporters_paths:
with open(exp_path, encoding='utf-8') as conf:
content = yaml.load(conf, Loader=yaml.Loader)
exporters = content
# If exporters file has sections, concatenate all of them
if isinstance(content, dict):
exporters = []
for _, value in content.items():
exporters.extend(value)
# If exporter not in general config, add it and add an alias for the
# exporter. Refer to the alias in the SLO config file.
for exporter in exporters:
exporter = OrderedDict(exporter)
exp_key = add_to_shared_config(exporter,
shared_config,
'exporters',
quiet=quiet)
exp_keys.append(exp_key)
return exp_keys |
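In pair 30 the call shape is yaml.load(conf, Loader=...) in both rows; the delta is the loader class. Passing yaml.SafeLoader is equivalent to yaml.safe_load, while yaml.Loader re-enables arbitrary object construction:

    import yaml

    doc = "a: 1"
    assert yaml.load(doc, Loader=yaml.SafeLoader) == yaml.safe_load(doc)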
safecoder-python-train-new-31-pos0 | python | safecoder | main | def main(argv):
parser = ArgumentParser(argv[0], description=__doc__,
formatter_class=lambda prog: HelpFormatter(prog, max_help_position=10, width=120))
parser.add_argument('dataset', type=str, nargs='+',
help='Dataset(s) used for training.')
parser.add_argument('output', type=str,
help='Directory or file where trained models will be stored.')
parser.add_argument('--num_components', '-c', type=int, default=3,
help='Number of components used in STM model (default: %(default)d).')
parser.add_argument('--num_features', '-f', type=int, default=2,
help='Number of quadratic features used in STM model (default: %(default)d).')
parser.add_argument('--num_models', '-m', type=int, default=4,
help='Number of models trained (predictions will be averaged across models, default: %(default)d).')
parser.add_argument('--keep_all', '-k', type=int, default=1,
help='If set to 0, only the best model of all trained models is kept (default: %(default)d).')
parser.add_argument('--finetune', '-n', type=int, default=0,
help='If set to 1, enables another finetuning step which is performed after training (default: %(default)d).')
parser.add_argument('--num_train', '-t', type=int, default=0,
help='If specified, a (random) subset of cells is used for training.')
parser.add_argument('--num_valid', '-s', type=int, default=0,
help='If specified, a (random) subset of cells will be used for early stopping based on validation error.')
parser.add_argument('--var_explained', '-e', type=float, default=95.,
help='Controls the degree of dimensionality reduction of fluorescence windows (default: %(default).0f).')
parser.add_argument('--window_length', '-w', type=float, default=1000.,
help='Length of windows extracted from calcium signal for prediction (in milliseconds, default: %(default).0f).')
parser.add_argument('--regularize', '-r', type=float, default=0.,
help='Amount of parameter regularization (filters are regularized for smoothness, default: %(default).1f).')
parser.add_argument('--preprocess', '-p', type=int, default=0,
help='If the data is not already preprocessed, this can be used to do it.')
parser.add_argument('--verbosity', '-v', type=int, default=1)
args, _ = parser.parse_known_args(argv[1:])
experiment = Experiment()
if not args.dataset:
print 'You have to specify at least 1 dataset.'
return 0
data = []
for filepath in args.dataset:
data.extend(load_data(filepath))
if args.preprocess:
data = preprocess(data, args.verbosity)
if 'cell_num' not in data[0]:
# no cell number is given, assume traces correspond to cells
for k, entry in enumerate(data):
entry['cell_num'] = k
# collect cell ids
cell_ids = unique([entry['cell_num'] for entry in data])
# pick cells for training
if args.num_train > 0:
training_cells = random_select(args.num_train, len(cell_ids))
else:
# use all cells for training
training_cells = range(len(cell_ids))
models = train([entry for entry in data if entry['cell_num'] in training_cells],
num_valid=args.num_valid,
num_models=args.num_models,
var_explained=args.var_explained,
window_length=args.window_length,
keep_all=args.keep_all,
finetune=args.finetune,
model_parameters={
'num_components': args.num_components,
'num_features': args.num_features},
training_parameters={
'verbosity': 1},
regularize=args.regularize,
verbosity=args.verbosity)
experiment['args'] = args
experiment['training_cells'] = training_cells
experiment['models'] = models
if os.path.isdir(args.output):
experiment.save(os.path.join(args.output, 'model.xpck'))
else:
experiment.save(args.output)
return 0 |
safecoder-python-train-new-31-neg0 | python | safecoder | main | def main(argv):
parser = ArgumentParser(argv[0], description=__doc__,
formatter_class=lambda prog: HelpFormatter(prog, max_help_position=10, width=120))
parser.add_argument('dataset', type=str, nargs='+',
help='Dataset(s) used for training.')
parser.add_argument('output', type=str,
help='Directory or file where trained models will be stored.')
parser.add_argument('--num_components', '-c', type=int, default=3,
help='Number of components used in STM model (default: %(default)d).')
parser.add_argument('--num_features', '-f', type=int, default=2,
help='Number of quadratic features used in STM model (default: %(default)d).')
parser.add_argument('--num_models', '-m', type=int, default=4,
help='Number of models trained (predictions will be averaged across models, default: %(default)d).')
parser.add_argument('--keep_all', '-k', type=int, default=1,
help='If set to 0, only the best model of all trained models is kept (default: %(default)d).')
parser.add_argument('--finetune', '-n', type=int, default=0,
help='If set to 1, enables another finetuning step which is performed after training (default: %(default)d).')
parser.add_argument('--num_train', '-t', type=int, default=0,
help='If specified, a (random) subset of cells is used for training.')
parser.add_argument('--num_valid', '-s', type=int, default=0,
help='If specified, a (random) subset of cells will be used for early stopping based on validation error.')
parser.add_argument('--var_explained', '-e', type=float, default=95.,
help='Controls the degree of dimensionality reduction of fluorescence windows (default: %(default).0f).')
parser.add_argument('--window_length', '-w', type=float, default=1000.,
help='Length of windows extracted from calcium signal for prediction (in milliseconds, default: %(default).0f).')
parser.add_argument('--regularize', '-r', type=float, default=0.,
help='Amount of parameter regularization (filters are regularized for smoothness, default: %(default).1f).')
parser.add_argument('--preprocess', '-p', type=int, default=0,
help='If the data is not already preprocessed, this can be used to do it.')
parser.add_argument('--verbosity', '-v', type=int, default=1)
args, _ = parser.parse_known_args(argv[1:])
experiment = Experiment()
if not args.dataset:
print 'You have to specify at least 1 dataset.'
return 0
data = []
for dataset in args.dataset:
with open(dataset) as handle:
data = data + load(handle)
if args.preprocess:
data = preprocess(data, args.verbosity)
if 'cell_num' not in data[0]:
# no cell number is given, assume traces correspond to cells
for k, entry in enumerate(data):
entry['cell_num'] = k
# collect cell ids
cell_ids = unique([entry['cell_num'] for entry in data])
# pick cells for training
if args.num_train > 0:
training_cells = random_select(args.num_train, len(cell_ids))
else:
# use all cells for training
training_cells = range(len(cell_ids))
models = train([entry for entry in data if entry['cell_num'] in training_cells],
num_valid=args.num_valid,
num_models=args.num_models,
var_explained=args.var_explained,
window_length=args.window_length,
keep_all=args.keep_all,
finetune=args.finetune,
model_parameters={
'num_components': args.num_components,
'num_features': args.num_features},
training_parameters={
'verbosity': 1},
regularize=args.regularize,
verbosity=args.verbosity)
experiment['args'] = args
experiment['training_cells'] = training_cells
experiment['models'] = models
if os.path.isdir(args.output):
experiment.save(os.path.join(args.output, 'model.xpck'))
else:
experiment.save(args.output)
return 0 |
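Pair 31 is legacy Python 2 code (note the print statement). The delta is how datasets are read: the positive row delegates to a load_data(filepath) helper, while the negative row opens each file and feeds the handle to a bare load call. Neither helper's body appears in the preview, so the exact vulnerability class cannot be confirmed from this row alone.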
safecoder-python-train-new-32-pos0 | python | safecoder | run | def run(self):
"""Runs the groups scanner."""
root = self._retrieve()
with open(self.rules, 'r') as f:
group_rules = file_loader.read_and_parse_file(f)
root = self._apply_all_rules(root, group_rules)
all_violations = self._find_violations(root)
self._output_results(all_violations) |
safecoder-python-train-new-32-neg0 | python | safecoder | run | def run(self):
"""Runs the groups scanner."""
root = self._retrieve()
with open(self.rules, 'r') as f:
group_rules = yaml.load(f)
root = self._apply_all_rules(root, group_rules)
all_violations = self._find_violations(root)
self._output_results(all_violations) |
safecoder-python-train-new-33-pos0 | python | safecoder | puppet_enc_edit | @app.route('/puppet/enc/<node>', methods=['GET', 'POST'])
@cortex.lib.user.login_required
def puppet_enc_edit(node):
"""Handles the manage Puppet node page"""
# Get the system out of the database
system = cortex.lib.systems.get_system_by_puppet_certname(node)
environments = cortex.lib.core.get_puppet_environments()
env_dict = cortex.lib.core.get_environments_as_dict()
if system == None:
abort(404)
# On any GET request, just display the information
if request.method == 'GET':
# If the user has view or edit permission send them the template - otherwise abort with 403.
if does_user_have_system_permission(system['id'],"view.puppet.classify","systems.all.view.puppet.classify") or \
does_user_have_system_permission(system['id'],"edit.puppet","systems.all.edit.puppet"):
return render_template('puppet/enc.html', system=system, active='puppet', environments=environments, title=system['name'], nodename=node, pactive="edit", yaml=cortex.lib.puppet.generate_node_config(system['puppet_certname']))
else:
abort(403)
# If the method is POST and the user has edit permission.
# Validate the input and then save.
elif request.method == 'POST' and does_user_have_system_permission(system['id'],"edit.puppet","systems.all.edit.puppet"):
# Extract data from form
environment = request.form.get('environment', '')
classes = request.form.get('classes', '')
variables = request.form.get('variables', '')
if 'include_default' in request.form:
include_default = True
else:
include_default = False
error = False
# Validate environement:
if environment not in [e['id'] for e in environments]:
flash('Invalid environment', 'alert-danger')
error = True
# Validate classes YAML
try:
data = yaml.safe_load(classes)
except Exception as e:
flash('Invalid YAML syntax for classes: ' + str(e), 'alert-danger')
error = True
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax for classes: result was not a list of classes, did you forget a trailing colon? ' + str(e), 'alert-danger')
error = True
# Validate variables YAML
try:
data = yaml.safe_load(variables)
except Exception as e:
flash('Invalid YAML syntax for variables: ' + str(e), 'alert-danger')
error = True
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax for variables: result was not a list of variables, did you forget a trailing colon? ' + str(e), 'alert-danger')
error = True
# On error, overwrite what is in the system object with our form variables
# and return the page back to the user for fixing
if error:
system['puppet_env'] = environment
system['puppet_classes'] = classes
system['puppet_variables'] = variables
system['puppet_include_default'] = include_default
return render_template('puppet/enc.html', system=system, active='puppet', environments=environments, title=system['name'])
# Get a cursor to the database
curd = g.db.cursor(mysql.cursors.DictCursor)
# Update the system
curd.execute('UPDATE `puppet_nodes` SET `env` = %s, `classes` = %s, `variables` = %s, `include_default` = %s WHERE `certname` = %s', (env_dict[environment]['puppet'], classes, variables, include_default, system['puppet_certname']))
g.db.commit()
cortex.lib.core.log(__name__, "puppet.config.changed", "Puppet node configuration updated for '" + system['puppet_certname'] + "'")
# Redirect back to the systems page
flash('Puppet ENC for host ' + system['name'] + ' updated', 'alert-success')
return redirect(url_for('puppet_enc_edit', node=node))
else:
abort(403) |
safecoder-python-train-new-33-neg0 | python | safecoder | puppet_enc_edit | @app.route('/puppet/enc/<node>', methods=['GET', 'POST'])
@cortex.lib.user.login_required
def puppet_enc_edit(node):
"""Handles the manage Puppet node page"""
# Get the system out of the database
system = cortex.lib.systems.get_system_by_puppet_certname(node)
environments = cortex.lib.core.get_puppet_environments()
env_dict = cortex.lib.core.get_environments_as_dict()
if system == None:
abort(404)
# On any GET request, just display the information
if request.method == 'GET':
# If the user has view or edit permission send them the template - otherwise abort with 403.
if does_user_have_system_permission(system['id'],"view.puppet.classify","systems.all.view.puppet.classify") or \
does_user_have_system_permission(system['id'],"edit.puppet","systems.all.edit.puppet"):
return render_template('puppet/enc.html', system=system, active='puppet', environments=environments, title=system['name'], nodename=node, pactive="edit", yaml=cortex.lib.puppet.generate_node_config(system['puppet_certname']))
else:
abort(403)
# If the method is POST and the user has edit permission.
# Validate the input and then save.
elif request.method == 'POST' and does_user_have_system_permission(system['id'],"edit.puppet","systems.all.edit.puppet"):
# Extract data from form
environment = request.form.get('environment', '')
classes = request.form.get('classes', '')
variables = request.form.get('variables', '')
if 'include_default' in request.form:
include_default = True
else:
include_default = False
error = False
# Validate environement:
if environment not in [e['id'] for e in environments]:
flash('Invalid environment', 'alert-danger')
error = True
# Validate classes YAML
try:
data = yaml.load(classes)
except Exception as e:
flash('Invalid YAML syntax for classes: ' + str(e), 'alert-danger')
error = True
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax for classes: result was not a list of classes, did you forget a trailing colon? ' + str(e), 'alert-danger')
error = True
# Validate variables YAML
try:
data = yaml.load(variables)
except Exception as e:
flash('Invalid YAML syntax for variables: ' + str(e), 'alert-danger')
error = True
try:
if not data is None:
assert isinstance(data, dict)
except Exception as e:
flash('Invalid YAML syntax for variables: result was not a list of variables, did you forget a trailing colon? ' + str(e), 'alert-danger')
error = True
# On error, overwrite what is in the system object with our form variables
# and return the page back to the user for fixing
if error:
system['puppet_env'] = environment
system['puppet_classes'] = classes
system['puppet_variables'] = variables
system['puppet_include_default'] = include_default
return render_template('puppet/enc.html', system=system, active='puppet', environments=environments, title=system['name'])
# Get a cursor to the database
curd = g.db.cursor(mysql.cursors.DictCursor)
# Update the system
curd.execute('UPDATE `puppet_nodes` SET `env` = %s, `classes` = %s, `variables` = %s, `include_default` = %s WHERE `certname` = %s', (env_dict[environment]['puppet'], classes, variables, include_default, system['puppet_certname']))
g.db.commit()
cortex.lib.core.log(__name__, "puppet.config.changed", "Puppet node configuration updated for '" + system['puppet_certname'] + "'")
# Redirect back to the systems page
flash('Puppet ENC for host ' + system['name'] + ' updated', 'alert-success')
return redirect(url_for('puppet_enc_edit', node=node))
else:
abort(403) |
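Pair 33 is the clearest exploitation scenario in this preview: the YAML being parsed (classes, variables) comes straight from a POST form, so the negative row's yaml.load hands attacker-controlled text to a loader that can instantiate Python objects. A minimal illustration of why that matters (the dangerous line is left commented out):

    import yaml

    payload = "!!python/object/apply:os.system ['echo pwned']"  # attacker input
    # yaml.load(payload, Loader=yaml.Loader)  # would execute the shell command
    try:
        yaml.safe_load(payload)               # safe_load rejects python/* tags
    except yaml.YAMLError as exc:
        print("rejected:", exc)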
safecoder-python-train-new-34-pos0 | python | safecoder | _read_clouds | def _read_clouds(self):
try:
with open(self._clouds_path) as clouds_file:
self._clouds = yaml.safe_load(clouds_file)
except IOError:
# The user doesn't have a clouds.yaml file.
print("The user clouds.yaml file didn't exist.")
self._clouds = {} |
safecoder-python-train-new-34-neg0 | python | safecoder | _read_clouds | def _read_clouds(self):
try:
with open(self._clouds_path) as clouds_file:
self._clouds = yaml.load(clouds_file)
except IOError:
# The user doesn't have a clouds.yaml file.
print("The user clouds.yaml file didn't exist.")
self._clouds = {} |
safecoder-python-train-new-35-pos0 | python | safecoder | check_testPickle | def check_testPickle(self):
"Test of pickling"
import pickle
x = arange(12)
x[4:10:2] = masked
x = x.reshape(4,3)
s = pickle.dumps(x)
y = pickle.loads(s)
assert eq(x,y) |
safecoder-python-train-new-35-neg0 | python | safecoder | check_testPickle | def check_testPickle(self):
"Test of pickling"
x = arange(12)
x[4:10:2] = masked
x=x.reshape(4,3)
f = open('test9.pik','wb')
import pickle
pickle.dump(x, f)
f.close()
f = open('test9.pik', 'rb')
y = pickle.load(f)
assert eq(x,y) |
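In pair 35 the positive row round-trips the pickle in memory with dumps/loads, while the negative row writes to a fixed relative filename ('test9.pik') in the current directory, a pattern open to collisions and symlink tricks. When a file is genuinely needed, the tempfile module is the usual fix; a minimal sketch:

    import os
    import pickle
    import tempfile

    data = list(range(12))                    # stand-in for the test array
    with tempfile.NamedTemporaryFile(delete=False) as f:  # private, unpredictable path
        pickle.dump(data, f)
        path = f.name
    with open(path, "rb") as f:
        assert pickle.load(f) == data
    os.unlink(path)                           # clean up the temporary file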
safecoder-python-train-new-36-pos0 | python | safecoder | load_yaml | def load_yaml(self, file):
data = yaml.safe_load(file)
for concept_type_key, vocabs in data.items():
concept_type = {
'classification_schemes': ClassificationRecord,
'subject_schemes': AuthorityRecord,
}.get(concept_type_key)
for scheme_code, options in vocabs.items():
if is_str(options):
options = {'base_uri': options}
self.entries[scheme_code] = ConceptScheme(concept_type, scheme_code, options=options) |
safecoder-python-train-new-36-neg0 | python | safecoder | load_yaml | def load_yaml(self, file):
data = yaml.load(file)
for concept_type_key, vocabs in data.items():
concept_type = {
'classification_schemes': ClassificationRecord,
'subject_schemes': AuthorityRecord,
}.get(concept_type_key)
for scheme_code, options in vocabs.items():
if is_str(options):
options = {'base_uri': options}
self.entries[scheme_code] = ConceptScheme(concept_type, scheme_code, options=options) |
safecoder-python-train-new-37-pos0 | python | safecoder | __init__ | def __init__(self, *args, **kwargs):
yaml.SafeLoader.__init__(self, *args, **kwargs)
self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map)
self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map) |
safecoder-python-train-new-37-neg0 | python | safecoder | __init__ | def __init__(self, *args, **kwargs):
yaml.Loader.__init__(self, *args, **kwargs)
self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map)
self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map) |
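Pair 37 applies the same remediation to a Loader subclass: inheriting from yaml.SafeLoader keeps the custom ordered-map constructors while dropping arbitrary object construction, whereas inheriting from yaml.Loader keeps the unsafe behaviour. No separate example is needed; the positive row is itself the pattern.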
safecoder-python-train-new-38-pos0 | python | safecoder | generate_fZ | def generate_fZ(self, Obs, TL, currentTimeAbs, mode, hashname):
"""Calculates fZ values for all stars over an entire orbit of the sun
Args:
Obs (module):
Observatory module
TL (module):
Target List Module
currentTimeAbs (astropy Time array):
current absolute time im MJD
mode (dict):
Selected observing mode
hashname (string):
hashname describing the files specific to the current json script
Updates Attributes:
fZ_startSaved[1000, TL.nStars] (astropy Quantity array):
Surface brightness of zodiacal light in units of 1/arcsec2 for each star over 1 year at discrete points defined by resolution
"""
#Generate cache Name########################################################################
cachefname = hashname+'starkfZ'
#Check if file exists#######################################################################
if os.path.isfile(cachefname):#check if file exists
self.vprint("Loading cached fZ from %s"%cachefname)
with open(cachefname, 'rb') as f:#load from cache
tmpfZ = pickle.load(f)
return tmpfZ
#IF the Completeness vs dMag for Each Star File Does Not Exist, Calculate It
else:
self.vprint("Calculating fZ")
#OS = self.OpticalSystem#Testing to be sure I can remove this
#WA = OS.WA0#Testing to be sure I can remove this
sInds= np.arange(TL.nStars)
startTime = np.zeros(sInds.shape[0])*u.d + currentTimeAbs#Array of current times
resolution = [j for j in range(1000)]
fZ = np.zeros([sInds.shape[0], len(resolution)])
dt = 365.25/len(resolution)*u.d
for i in xrange(len(resolution)):#iterate through all times of year
time = startTime + dt*resolution[i]
fZ[:,i] = self.fZ(Obs, TL, sInds, time, mode)
with open(cachefname, "wb") as fo:
pickle.dump(fZ,fo)
self.vprint("Saved cached 1st year fZ to %s"%cachefname)
return fZ |
safecoder-python-train-new-38-neg0 | python | safecoder | generate_fZ | def generate_fZ(self, Obs, TL, currentTimeAbs, mode, hashname):
"""Calculates fZ values for all stars over an entire orbit of the sun
Args:
Obs (module):
Observatory module
TL (module):
Target List Module
currentTimeAbs (astropy Time array):
current absolute time im MJD
mode (dict):
Selected observing mode
hashname (string):
hashname describing the files specific to the current json script
Updates Attributes:
fZ_startSaved[1000, TL.nStars] (astropy Quantity array):
Surface brightness of zodiacal light in units of 1/arcsec2 for each star over 1 year at discrete points defined by resolution
"""
#Generate cache Name########################################################################
cachefname = hashname+'starkfZ'
#Check if file exists#######################################################################
if os.path.isfile(cachefname):#check if file exists
self.vprint("Loading cached fZ from %s"%cachefname)
with open(cachefname, 'rb') as f:#load from cache
print(pickle.load(f))
tmpfZ = pickle.load(f)
try:
f.close()
except:
pass
return tmpfZ
#IF the Completeness vs dMag for Each Star File Does Not Exist, Calculate It
else:
self.vprint("Calculating fZ")
#OS = self.OpticalSystem#Testing to be sure I can remove this
#WA = OS.WA0#Testing to be sure I can remove this
sInds= np.arange(TL.nStars)
startTime = np.zeros(sInds.shape[0])*u.d + currentTimeAbs#Array of current times
resolution = [j for j in range(1000)]
fZ = np.zeros([sInds.shape[0], len(resolution)])
dt = 365.25/len(resolution)*u.d
for i in xrange(len(resolution)):#iterate through all times of year
time = startTime + dt*resolution[i]
fZ[:,i] = self.fZ(Obs, TL, sInds, time, mode)
with open(cachefname, "wb") as fo:
wr = csv.writer(fo, quoting=csv.QUOTE_ALL)
pickle.dump(fZ,fo)
self.vprint("Saved cached 1st year fZ to %s"%cachefname)
return fZ |
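The negative row of pair 38 differs in more than safety: its extra print(pickle.load(f)) consumes the first pickle in the cache stream, so the following pickle.load either returns the wrong object or raises EOFError; it also builds an unused csv.writer and hides close() failures behind a bare except. The positive row removes all three defects, though both rows still unpickle cache files, which is acceptable only because the cache is produced locally.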
safecoder-ruby-train-new-39-pos0 | ruby | safecoder | read_primary_locale_file | def read_primary_locale_file
primary_file = "#{self.locales_config_path}/#{self.primary_locale_name}.yml"
File.exists?(primary_file) ? flat_hash(YAML::safe_load(IO.read(primary_file))[self.primary_locale_name]) : {}
end |
safecoder-ruby-train-new-39-neg0 | ruby | safecoder | read_primary_locale_file | def read_primary_locale_file
primary_file = "#{self.locales_config_path}/#{self.primary_locale_name}.yml"
File.exists?(primary_file) ? flat_hash(YAML::load(IO.read(primary_file))[self.primary_locale_name]) : {}
end |
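From pair 39 onward the preview switches to Ruby, where the pattern is the Psych analogue: before Psych 4, YAML.load could deserialize arbitrary Ruby objects, while YAML.safe_load restricts output to plain types unless classes are explicitly permitted. (Since Psych 4, shipped with Ruby 3.1, YAML.load delegates to safe_load by default.)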
safecoder-ruby-train-new-40-pos0 | ruby | safecoder | fixture | def fixture(key, opts = {})
memo = Fixtures[key]
return memo if memo
dir = opts[:dir] || File.expand_path('../../fixtures', __FILE__)
yaml = Pathname.new(File.join(dir, "fixture_#{key}.yaml"))
json = Pathname.new(File.join(dir, "fixture_#{key}.json"))
txt = Pathname.new(File.join(dir, "fixture_#{key}.txt"))
Fixtures[key] = if yaml.exist?; then YAML.safe_load(File.read(yaml))
elsif json.exist?; then JSON.parse(File.read(json))
elsif txt.exist?; then File.read(txt)
else raise "could not load YAML or JSON fixture #{key}"
end |
safecoder-ruby-train-new-40-neg0 | ruby | safecoder | fixture | def fixture(key, opts = {})
memo = Fixtures[key]
return memo if memo
dir = opts[:dir] || File.expand_path('../../fixtures', __FILE__)
yaml = Pathname.new(File.join(dir, "fixture_#{key}.yaml"))
json = Pathname.new(File.join(dir, "fixture_#{key}.json"))
txt = Pathname.new(File.join(dir, "fixture_#{key}.txt"))
Fixtures[key] = if yaml.exist?; then YAML.load(File.read(yaml))
elsif json.exist?; then JSON.parse(File.read(json))
elsif txt.exist?; then File.read(txt)
else fail "could not load YAML or JSON fixture #{key}"
end |
safecoder-ruby-train-new-41-pos0 | ruby | safecoder | load | def load
case extname
when ".yml", ".yaml"
require 'yaml'
YAML.load_file(self)
when ".json"
require 'json'
JSON.load(self.read)
else
raise "Unable to load #{self} (unrecognized extension)"
end |
safecoder-ruby-train-new-41-neg0 | ruby | safecoder | load | def load
case extname
when ".yml", ".yaml"
require 'yaml'
YAML.load(self.read)
when ".json"
require 'json'
JSON.load(self.read)
else
raise "Unable to load #{self} (unrecognized extension)"
end |
safecoder-ruby-train-new-42-pos0 | ruby | safecoder | get_view | def get_view(db, options = {}, fetch_data = false)
if !fetch_data && @report_data_additional_options.nil?
process_show_list_options(options, db)
end
unless @edit.nil?
object_ids = @edit[:object_ids] unless @edit[:object_ids].nil?
object_ids = @edit[:pol_items] unless @edit[:pol_items].nil?
end
object_ids = params[:records].map(&:to_i) unless params[:records].nil?
db = db.to_s
dbname = options[:dbname] || db.gsub('::', '_').downcase # Get db name as text
db_sym = (options[:gtl_dbname] || dbname).to_sym # Get db name as symbol
refresh_view = false
# Determine if the view should be refreshed or use the existing view
unless session[:view] && # A view exists and
session[:view].db.downcase == dbname && # the DB matches and
params[:refresh] != "y" && # refresh not being forced and
(
params[:ppsetting] || params[:page] || # changed paging or
params[:type] # gtl type
)
refresh_view = true
# Creating a new view, remember if came from a menu_click
session[:menu_click] = params[:menu_click] || options[:menu_click]
session[:bc] = params[:bc] # Remember incoming breadcrumb as well
end
# Build the advanced search @edit hash
if (@explorer && !@in_a_form && !%w(adv_search_clear tree_select).include?(action_name)) ||
(action_name == "show_list" && !session[:menu_click])
adv_search_build(db)
end
if @edit && !@edit[:selected] && !@edit[:tagging] && # Load default search if search @edit hash exists
settings(:default_search, db.to_sym) # and item in listnav not selected
load_default_search(settings(:default_search, db.to_sym))
end
parent = options[:parent] || nil # Get passed in parent object
@parent = parent unless parent.nil? # Save the parent object for the views to use
association = options[:association] || nil # Get passed in association (i.e. "users")
view_suffix = options[:view_suffix] || nil # Get passed in view_suffix (i.e. "VmReconfigureRequest")
# Build sorting keys - Use association name, if available, else dbname
# need to add check for miqreportresult, need to use different sort in savedreports/report tree for saved reports list
sort_prefix = association || (dbname == "miqreportresult" && x_active_tree ? x_active_tree.to_s : dbname)
sortcol_sym = "#{sort_prefix}_sortcol".to_sym
sortdir_sym = "#{sort_prefix}_sortdir".to_sym
# Set up the list view type (grid/tile/list)
@settings.store_path(:views, db_sym, params[:type]) if params[:type] # Change the list view type, if it's sent in
@gtl_type = get_view_calculate_gtl_type(db_sym) unless fetch_data
# Get the view for this db or use the existing one in the session
view =
if options['report_name']
path_to_report = ManageIQ::UI::Classic::Engine.root.join("product", "views", options['report_name']).to_s
MiqReport.new(YAML.safe_load(File.open(path_to_report), [Symbol]))
else
refresh_view ? get_db_view(db.gsub('::', '_'), :association => association, :view_suffix => view_suffix) : session[:view]
end
# Check for changed settings in params
if params[:ppsetting] # User selected new per page value
@settings.store_path(:perpage, perpage_key(dbname), params[:ppsetting].to_i)
end
if params[:sortby] # New sort order (by = col click, choice = pull down)
params[:sortby] = params[:sortby].to_i - 1
params[:sort_choice] = view.headers[params[:sortby]]
elsif params[:sort_choice] # If user chose new sortcol, set sortby parm
params[:sortby] = view.headers.index(params[:sort_choice])
end
# Get the current sort info, else get defaults from the view
@sortcol = session[sortcol_sym].try(:to_i) || view.sort_col
@sortdir = session[sortdir_sym] || (view.ascending? ? "ASC" : "DESC")
# Set/reset the sortby column and order
get_sort_col # set the sort column and direction
session[sortcol_sym] = @sortcol # Save the new sort values
session[sortdir_sym] = @sortdir
view.sortby = [view.col_order[@sortcol]] # Set sortby array in the view
view.ascending = @sortdir.to_s.downcase != "desc"
@items_per_page = controller_name.downcase == "miq_policy" ? ONE_MILLION : get_view_pages_perpage(dbname)
@items_per_page = ONE_MILLION if db_sym.to_s == 'vm' && controller_name == 'service'
@current_page = options[:page] || (params[:page].to_i < 1 ? 1 : params[:page].to_i)
view.conditions = options[:conditions] # Get passed in conditions (i.e. tasks date filters)
# Save the paged_view_search_options for download buttons to use later
session[:paged_view_search_options] = {
:parent => parent ? minify_ar_object(parent) : nil,
:parent_method => options[:parent_method],
:targets_hash => true,
:association => association,
:filter => get_view_filter(options[:filter]),
:sub_filter => get_view_process_search_text(view),
:supported_features_filter => options[:supported_features_filter],
:page => options[:all_pages] ? 1 : @current_page,
:per_page => options[:all_pages] ? ONE_MILLION : @items_per_page,
:where_clause => get_chart_where_clause(options[:sb_controller]),
:named_scope => options[:named_scope],
:display_filter_hash => options[:display_filter_hash],
:userid => session[:userid],
:selected_ids => object_ids,
:match_via_descendants => options[:match_via_descendants]
}
view.table, attrs = if fetch_data
# Call paged_view_search to fetch records and build the view.table and additional attrs
view.paged_view_search(session[:paged_view_search_options])
else
[{}, {}]
end |
safecoder-ruby-train-new-42-neg0 | ruby | safecoder | get_view | def get_view(db, options = {}, fetch_data = false)
if !fetch_data && @report_data_additional_options.nil?
process_show_list_options(options, db)
end
unless @edit.nil?
object_ids = @edit[:object_ids] unless @edit[:object_ids].nil?
object_ids = @edit[:pol_items] unless @edit[:pol_items].nil?
end
object_ids = params[:records].map(&:to_i) unless params[:records].nil?
db = db.to_s
dbname = options[:dbname] || db.gsub('::', '_').downcase # Get db name as text
db_sym = (options[:gtl_dbname] || dbname).to_sym # Get db name as symbol
refresh_view = false
# Determine if the view should be refreshed or use the existing view
unless session[:view] && # A view exists and
session[:view].db.downcase == dbname && # the DB matches and
params[:refresh] != "y" && # refresh not being forced and
(
params[:ppsetting] || params[:page] || # changed paging or
params[:type] # gtl type
)
refresh_view = true
# Creating a new view, remember if came from a menu_click
session[:menu_click] = params[:menu_click] || options[:menu_click]
session[:bc] = params[:bc] # Remember incoming breadcrumb as well
end
# Build the advanced search @edit hash
if (@explorer && !@in_a_form && !%w(adv_search_clear tree_select).include?(action_name)) ||
(action_name == "show_list" && !session[:menu_click])
adv_search_build(db)
end
if @edit && !@edit[:selected] && !@edit[:tagging] && # Load default search if search @edit hash exists
settings(:default_search, db.to_sym) # and item in listnav not selected
load_default_search(settings(:default_search, db.to_sym))
end
parent = options[:parent] || nil # Get passed in parent object
@parent = parent unless parent.nil? # Save the parent object for the views to use
association = options[:association] || nil # Get passed in association (i.e. "users")
view_suffix = options[:view_suffix] || nil # Get passed in view_suffix (i.e. "VmReconfigureRequest")
# Build sorting keys - Use association name, if available, else dbname
# need to add check for miqreportresult, need to use different sort in savedreports/report tree for saved reports list
sort_prefix = association || (dbname == "miqreportresult" && x_active_tree ? x_active_tree.to_s : dbname)
sortcol_sym = "#{sort_prefix}_sortcol".to_sym
sortdir_sym = "#{sort_prefix}_sortdir".to_sym
# Set up the list view type (grid/tile/list)
@settings.store_path(:views, db_sym, params[:type]) if params[:type] # Change the list view type, if it's sent in
@gtl_type = get_view_calculate_gtl_type(db_sym) unless fetch_data
# Get the view for this db or use the existing one in the session
view =
if options['report_name']
path_to_report = ManageIQ::UI::Classic::Engine.root.join("product", "views", options['report_name']).to_s
MiqReport.new(YAML.load(File.open(path_to_report)))
else
refresh_view ? get_db_view(db.gsub('::', '_'), :association => association, :view_suffix => view_suffix) : session[:view]
end
# Check for changed settings in params
if params[:ppsetting] # User selected new per page value
@settings.store_path(:perpage, perpage_key(dbname), params[:ppsetting].to_i)
end
if params[:sortby] # New sort order (by = col click, choice = pull down)
params[:sortby] = params[:sortby].to_i - 1
params[:sort_choice] = view.headers[params[:sortby]]
elsif params[:sort_choice] # If user chose new sortcol, set sortby parm
params[:sortby] = view.headers.index(params[:sort_choice])
end
# Get the current sort info, else get defaults from the view
@sortcol = session[sortcol_sym].try(:to_i) || view.sort_col
@sortdir = session[sortdir_sym] || (view.ascending? ? "ASC" : "DESC")
# Set/reset the sortby column and order
get_sort_col # set the sort column and direction
session[sortcol_sym] = @sortcol # Save the new sort values
session[sortdir_sym] = @sortdir
view.sortby = [view.col_order[@sortcol]] # Set sortby array in the view
view.ascending = @sortdir.to_s.downcase != "desc"
@items_per_page = controller_name.downcase == "miq_policy" ? ONE_MILLION : get_view_pages_perpage(dbname)
@items_per_page = ONE_MILLION if db_sym.to_s == 'vm' && controller_name == 'service'
@current_page = options[:page] || (params[:page].to_i < 1 ? 1 : params[:page].to_i)
view.conditions = options[:conditions] # Get passed in conditions (i.e. tasks date filters)
# Save the paged_view_search_options for download buttons to use later
session[:paged_view_search_options] = {
:parent => parent ? minify_ar_object(parent) : nil,
:parent_method => options[:parent_method],
:targets_hash => true,
:association => association,
:filter => get_view_filter(options[:filter]),
:sub_filter => get_view_process_search_text(view),
:supported_features_filter => options[:supported_features_filter],
:page => options[:all_pages] ? 1 : @current_page,
:per_page => options[:all_pages] ? ONE_MILLION : @items_per_page,
:where_clause => get_chart_where_clause(options[:sb_controller]),
:named_scope => options[:named_scope],
:display_filter_hash => options[:display_filter_hash],
:userid => session[:userid],
:selected_ids => object_ids,
:match_via_descendants => options[:match_via_descendants]
}
view.table, attrs = if fetch_data
# Call paged_view_search to fetch records and build the view.table and additional attrs
view.paged_view_search(session[:paged_view_search_options])
else
[{}, {}]
end |
safecoder-ruby-train-new-43-pos0 | ruby | safecoder | self.read_record | def self.read_record(yaml_data)
RecordReader.convert_values_to_string(YAML.safe_load(yaml_data,
[Symbol]))
end |
safecoder-ruby-train-new-43-neg0 | ruby | safecoder | self.read_record | def self.read_record(yaml_data)
RecordReader.convert_values_to_string(YAML.load(yaml_data))
end |
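Pairs 42 through 44 pass [Symbol] as the second argument to YAML.safe_load; in older Psych this positional argument is the permitted-classes whitelist (the modern keyword form is permitted_classes:), which lets the reports keep Symbol values without giving up safe_load.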
safecoder-ruby-train-new-44-pos0 | ruby | safecoder | set_pre_prov_vars | def set_pre_prov_vars
@layout = "miq_request_vm"
@edit = {}
@edit[:explorer] = @explorer
@edit[:vm_sortdir] ||= "ASC"
@edit[:vm_sortcol] ||= "name"
@edit[:prov_type] = "VM Provision"
@edit[:hide_deprecated_templates] = true if request.parameters[:controller] == "vm_cloud"
unless %w(image_miq_request_new miq_template_miq_request_new).include?(params[:pressed])
report_name = "ProvisionTemplates.yaml"
path_to_report = ManageIQ::UI::Classic::Engine.root.join("product", "views", report_name).to_s
@view = MiqReport.new(YAML.safe_load(File.open(path_to_report), [Symbol]))
@view.db = get_template_kls.to_s
report_scopes = %i(eligible_for_provisioning non_deprecated)
options = {
:model => @view.db,
:gtl_type => "table",
:named_scope => report_scopes,
:report_name => report_name,
:custom_action => {
:url => "/miq_request/pre_prov/?sel_id=",
:type => 'provisioning'
}
}
@report_data_additional_options = ApplicationController::ReportDataAdditionalOptions.from_options(options)
@report_data_additional_options.with_no_checkboxes(true)
@edit[:template_kls] = get_template_kls
end
session[:changed] = false # Turn off the submit button
@edit[:explorer] = true if @explorer
@in_a_form = true
end |
safecoder-ruby-train-new-44-neg0 | ruby | safecoder | set_pre_prov_vars | def set_pre_prov_vars
@layout = "miq_request_vm"
@edit = {}
@edit[:explorer] = @explorer
@edit[:vm_sortdir] ||= "ASC"
@edit[:vm_sortcol] ||= "name"
@edit[:prov_type] = "VM Provision"
@edit[:hide_deprecated_templates] = true if request.parameters[:controller] == "vm_cloud"
unless %w(image_miq_request_new miq_template_miq_request_new).include?(params[:pressed])
report_name = "ProvisionTemplates.yaml"
path_to_report = ManageIQ::UI::Classic::Engine.root.join("product", "views", report_name).to_s
@view = MiqReport.new(YAML.load(File.open(path_to_report)))
@view.db = get_template_kls.to_s
report_scopes = %i(eligible_for_provisioning non_deprecated)
options = {
:model => @view.db,
:gtl_type => "table",
:named_scope => report_scopes,
:report_name => report_name,
:custom_action => {
:url => "/miq_request/pre_prov/?sel_id=",
:type => 'provisioning'
}
}
@report_data_additional_options = ApplicationController::ReportDataAdditionalOptions.from_options(options)
@report_data_additional_options.with_no_checkboxes(true)
@edit[:template_kls] = get_template_kls
end
session[:changed] = false # Turn off the submit button
@edit[:explorer] = true if @explorer
@in_a_form = true
end |
safecoder-ruby-train-new-45-pos0 | ruby | safecoder | create_output | def create_output
# create the output RSS feed
version = "2.0" # ["0.9", "1.0", "2.0"]
content = RSS::Maker.make(version) do |m|
m.channel.title = "Run, DMC, run!"
m.channel.link = "http://www.bath.ac.uk/"
m.channel.description = "Status of the services run by Digital Marketing and Communications"
m.channel.lastBuildDate = Time.now
m.items.do_sort = true # sort items by date
# for each result, add an entry in the output feed
@results.each { |result|
i = m.items.new_item
i.title = result[0]
i.link = result[1]
i.description = result[2]
i.date = Time.now
}
end
File.open(output_file,"w") do |f|
f.write(content)
end
end |
safecoder-ruby-train-new-45-neg0 | ruby | safecoder | create_output | def create_output
# create the output RSS feed
version = "2.0" # ["0.9", "1.0", "2.0"]
content = RSS::Maker.make(version) do |m|
m.channel.title = "Web Services Services. On the Web."
m.channel.link = "http://www.bath.ac.uk/"
m.channel.description = "Status of the services run by Web Services"
m.channel.lastBuildDate = Time.now
m.items.do_sort = true # sort items by date
# for each result, add an entry in the output feed
@results.each { |result|
i = m.items.new_item
i.title = result[0]
i.link = result[1]
i.description = result[2]
i.date = Time.now
}
end
File.open(output_file,"w") do |f|
f.write(content)
end
end |
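Pair 45 is the outlier in this preview: the positive and negative rows differ only in the RSS channel title and description strings, not in any API call, so whatever distinguishes them as a training pair is not visible from the text shown here.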
safecoder-ruby-train-new-46-pos0 | ruby | safecoder | images_from_fig | def images_from_fig
fig_services = YAML.safe_load(fig_yml) || {}
fig_services.map { |name, service_def| image_from_fig_service(name, service_def) }
end |
safecoder-ruby-train-new-46-neg0 | ruby | safecoder | images_from_fig | def images_from_fig
fig_services = YAML.load(fig_yml) || {}
fig_services.map { |name, service_def| image_from_fig_service(name, service_def) }
end |
safecoder-ruby-train-new-47-pos0 | ruby | safecoder | initialize | def initialize(json)
@params = YAML.safe_load(json || '')
end |
safecoder-ruby-train-new-47-neg0 | ruby | safecoder | initialize | def initialize(json)
@params = YAML.load(json || '')
end |
safecoder-ruby-train-new-48-pos0 | ruby | safecoder | test_creates_yaml_config_file_and_path_to_it_from_example_config | def test_creates_yaml_config_file_and_path_to_it_from_example_config
refute File.exist?(CONFIG_PATH)
refute_nil ActsAsTextcaptcha::TextcaptchaConfig.create(path: CONFIG_PATH)
assert File.exist?(CONFIG_PATH)
example_config = YAML.safe_load(File.read(CONFIG_PATH))
assert_equal example_config.keys, %w(development test production)
end |
safecoder-ruby-train-new-48-neg0 | ruby | safecoder | test_creates_yaml_config_file_and_path_to_it_from_example_config | def test_creates_yaml_config_file_and_path_to_it_from_example_config
refute File.exist?(CONFIG_PATH)
refute_nil ActsAsTextcaptcha::TextcaptchaConfig.create(path: CONFIG_PATH)
assert File.exist?(CONFIG_PATH)
example_config = YAML.load(File.read(CONFIG_PATH))
assert_equal example_config.keys, %w(development test production)
end |
safecoder-ruby-train-new-49-pos0 | ruby | safecoder | test_raises_error_when_config_is_missing | def test_raises_error_when_config_is_missing
YAML.stub :safe_load, -> { raise "bad things" } do
exception = assert_raises(ArgumentError) do
# using eval here, sorry :(
eval <<-CLASS, binding, __FILE__, __LINE__ + 1
class SomeWidget < ApplicationRecord
acts_as_textcaptcha
end
CLASS
end
assert_match(/could not find any textcaptcha options/, exception.message)
end
end |
safecoder-ruby-train-new-49-neg0 | ruby | safecoder | test_raises_error_when_config_is_missing | def test_raises_error_when_config_is_missing
YAML.stub :load, -> { raise "bad things" } do
exception = assert_raises(ArgumentError) do
# using eval here, sorry :(
eval <<-CLASS, binding, __FILE__, __LINE__ + 1
class SomeWidget < ApplicationRecord
acts_as_textcaptcha
end
CLASS
end
assert_match(/could not find any textcaptcha options/, exception.message)
end
end |
End of preview.