Merge branch 'develop' into feature/1346-blender-publish-layout-json

This commit is contained in:
Simone Barbieri 2021-04-21 16:55:32 +01:00
commit 25dfd09bbf
148 changed files with 17095 additions and 789 deletions

View file

@ -1,7 +0,0 @@
# Configuration for weekly-digest - https://github.com/apps/weekly-digest
publishDay: sun
canPublishIssues: true
canPublishPullRequests: true
canPublishContributors: true
canPublishStargazers: true
canPublishCommits: true

View file

@ -2,6 +2,10 @@
OpenPype
====
[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub Requirements](https://img.shields.io/requires/github/pypeclub/pype?labelColor=303846) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2021-lightgrey?labelColor=303846)
Introduction
------------
@ -61,7 +65,8 @@ git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
#### To build OpenPype:
1) Run `.\tools\create_env.ps1` to create a virtual environment in `.\venv`
2) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\`
2) Run `.\tools\fetch_thirdparty_libs.ps1` to download third-party dependencies like ffmpeg and oiio. These will be included in the build.
3) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\`
To create distributable OpenPype versions, run `./tools/create_zip.ps1` - that will
create zip file with name `openpype-vx.x.x.zip` parsed from current OpenPype repository and
@ -116,8 +121,8 @@ pyenv local 3.7.9
#### To build OpenPype:
1) Run `./tools/create_env.sh` to create a virtual environment in `./venv`
2) Run `./tools/build.sh` to build OpenPype executables in `./build/`
2) Run `./tools/fetch_thirdparty_libs.sh` to download third-party dependencies like ffmpeg and oiio. These will be included in the build.
3) Run `./tools/build.sh` to build OpenPype executables in `./build/`
### Linux

View file

@ -4,11 +4,12 @@ from openpype.lib import PreLaunchHook
class PrePython2Vendor(PreLaunchHook):
"""Prepend python 2 dependencies for py2 hosts."""
# WARNING This hook will probably be deprecated in OpenPype 3 - kept for test
order = 10
app_groups = ["hiero", "nuke", "nukex", "unreal"]
def execute(self):
if not self.application.use_python_2:
return
# Prepare vendor dir path
self.log.info("adding global python 2 vendor")
pype_root = os.getenv("OPENPYPE_REPOS_ROOT")

View file

@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
"""Creator of Redshift proxy subset types."""
from openpype.hosts.maya.api import plugin, lib
class CreateRedshiftProxy(plugin.Creator):
"""Create instance of Redshift Proxy subset."""
name = "redshiftproxy"
label = "Redshift Proxy"
family = "redshiftproxy"
icon = "gears"
def __init__(self, *args, **kwargs):
super(CreateRedshiftProxy, self).__init__(*args, **kwargs)
animation_data = lib.collect_animation_data()
self.data["animation"] = False
self.data["proxyFrameStart"] = animation_data["frameStart"]
self.data["proxyFrameEnd"] = animation_data["frameEnd"]
self.data["proxyFrameStep"] = animation_data["step"]

View file

@ -105,7 +105,23 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
# Load relationships
shader_relation = api.get_representation_path(json_representation)
with open(shader_relation, "r") as f:
relationships = json.load(f)
json_data = json.load(f)
for rel, data in json_data["relationships"].items():
# process only non-shading nodes
current_node = "{}:{}".format(container["namespace"], rel)
if current_node in shader_nodes:
continue
print("processing {}".format(rel))
current_members = set(cmds.ls(
cmds.sets(current_node, query=True) or [], long=True))
new_members = {"{}".format(
m["name"]) for m in data["members"] or []}
dif = new_members.difference(current_members)
# add to set
cmds.sets(
dif, forceElement="{}:{}".format(container["namespace"], rel))
# update of reference could result in failed edits - material is not
# present because of renaming etc.
@ -120,7 +136,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
cmds.file(cr=reference_node) # cleanReference
# reapply shading groups from json representation on orig nodes
openpype.hosts.maya.api.lib.apply_shaders(relationships,
openpype.hosts.maya.api.lib.apply_shaders(json_data,
shader_nodes,
orig_nodes)
@ -128,12 +144,13 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"All successful edits were kept intact.\n",
"Failed and removed edits:"]
msg.extend(failed_edits)
msg = ScrollMessageBox(QtWidgets.QMessageBox.Warning,
"Some reference edit failed",
msg)
msg.exec_()
attributes = relationships.get("attributes", [])
attributes = json_data.get("attributes", [])
# region compute lookup
nodes_by_id = defaultdict(list)
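
For orientation, a minimal sketch of the shader-relation JSON this loader reads, assuming only the keys accessed above (`relationships`, `members`, `name`, `attributes`); the files written by the look extractor may carry additional fields.

```python
# Hypothetical look .json contents (shapes, set names and namespaces
# are illustrative only).
json_data = {
    "relationships": {
        "lambert1SG": {
            "members": [
                {"name": "|char_GRP|body_GEOShape"},
                {"name": "|char_GRP|head_GEOShape"},
            ],
        },
    },
    "attributes": [],
}
```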

View file

@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
"""Loader for Redshift proxy."""
from avalon.maya import lib
from avalon import api
from openpype.api import get_project_settings
import os
import maya.cmds as cmds
import clique
class RedshiftProxyLoader(api.Loader):
"""Load Redshift proxy"""
families = ["redshiftproxy"]
representations = ["rs"]
label = "Import Redshift Proxy"
order = -10
icon = "code-fork"
color = "orange"
def load(self, context, name=None, namespace=None, options=None):
"""Plugin entry point."""
from avalon.maya.pipeline import containerise
from openpype.hosts.maya.api.lib import namespaced
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "redshiftproxy"
asset_name = context['asset']["name"]
namespace = namespace or lib.unique_namespace(
asset_name + "_",
prefix="_" if asset_name[0].isdigit() else "",
suffix="_",
)
# Ensure Redshift for Maya is loaded.
cmds.loadPlugin("redshift4maya", quiet=True)
with lib.maintained_selection():
cmds.namespace(addNamespace=namespace)
with namespaced(namespace, new=False):
nodes, group_node = self.create_rs_proxy(
name, self.fname)
self[:] = nodes
if not nodes:
return
# colour the group node
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr("{0}.outlinerColor".format(group_node),
c[0], c[1], c[2])
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def update(self, container, representation):
node = container['objectName']
assert cmds.objExists(node), "Missing container"
members = cmds.sets(node, query=True) or []
rs_meshes = cmds.ls(members, type="RedshiftProxyMesh")
assert rs_meshes, "Cannot find RedshiftProxyMesh in container"
filename = api.get_representation_path(representation)
for rs_mesh in rs_meshes:
cmds.setAttr("{}.fileName".format(rs_mesh),
filename,
type="string")
# Update metadata
cmds.setAttr("{}.representation".format(node),
str(representation["_id"]),
type="string")
def remove(self, container):
# Delete container and its contents
if cmds.objExists(container['objectName']):
members = cmds.sets(container['objectName'], query=True) or []
cmds.delete([container['objectName']] + members)
# Remove the namespace, if empty
namespace = container['namespace']
if cmds.namespace(exists=namespace):
members = cmds.namespaceInfo(namespace, listNamespace=True)
if not members:
cmds.namespace(removeNamespace=namespace)
else:
self.log.warning("Namespace not deleted because it "
"still has members: %s", namespace)
def switch(self, container, representation):
self.update(container, representation)
def create_rs_proxy(self, name, path):
"""Creates Redshift Proxies showing a proxy object.
Args:
name (str): Proxy name.
path (str): Path to proxy file.
Returns:
(list, str): List of created nodes and the group transform node.
"""
rs_mesh = cmds.createNode(
'RedshiftProxyMesh', name="{}_RS".format(name))
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
cmds.setAttr("{}.fileName".format(rs_mesh),
path,
type="string")
cmds.connectAttr("{}.outMesh".format(rs_mesh),
"{}.inMesh".format(mesh_shape))
group_node = cmds.group(empty=True, name="{}_GRP".format(name))
mesh_transform = cmds.listRelatives(mesh_shape,
parent=True, fullPath=True)
cmds.parent(mesh_transform, group_node)
nodes = [rs_mesh, mesh_shape, group_node]
# determine if we need to enable animation support
files_in_folder = os.listdir(os.path.dirname(path))
collections, remainder = clique.assemble(files_in_folder)
if collections:
cmds.setAttr("{}.useFrameExtension".format(rs_mesh), 1)
return nodes, group_node
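
The animation check at the end of `create_rs_proxy` relies on `clique.assemble` grouping numbered files into collections; a minimal sketch with hypothetical file names:

```python
import clique

# Hypothetical listing of the folder that holds the loaded .rs file.
files_in_folder = ["proxy.1001.rs", "proxy.1002.rs", "proxy.1003.rs", "notes.txt"]

collections, remainder = clique.assemble(files_in_folder)
# collections -> one collection covering proxy.1001.rs - proxy.1003.rs
# remainder   -> ["notes.txt"]
# Any collection found means the proxy is a frame sequence, so
# useFrameExtension is enabled on the RedshiftProxyMesh node.
```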

View file

@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
"""Maya look collector."""
import re
import os
import glob
from maya import cmds
from maya import cmds # noqa
import pyblish.api
from openpype.hosts.maya.api import lib
@ -16,6 +18,11 @@ SHAPE_ATTRS = ["castsShadows",
"doubleSided",
"opposite"]
RENDERER_NODE_TYPES = [
# redshift
"RedshiftMeshParameters"
]
SHAPE_ATTRS = set(SHAPE_ATTRS)
@ -29,7 +36,6 @@ def get_look_attrs(node):
list: Attribute names to extract
"""
# When referenced get only attributes that are "changed since file open"
# which includes any reference edits, otherwise take *all* user defined
# attributes
@ -219,9 +225,13 @@ class CollectLook(pyblish.api.InstancePlugin):
with lib.renderlayer(instance.data["renderlayer"]):
self.collect(instance)
def collect(self, instance):
"""Collect looks.
Args:
instance: Instance to collect.
"""
self.log.info("Looking for look associations "
"for %s" % instance.data['name'])
@ -235,48 +245,91 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.info("Gathering set relations..")
# Ensure iteration happen in a list so we can remove keys from the
# dict within the loop
for objset in list(sets):
self.log.debug("From %s.." % objset)
# skipped types of attribute on render specific nodes
disabled_types = ["message", "TdataCompound"]
for obj_set in list(sets):
self.log.debug("From {}".format(obj_set))
# if node is specified as renderer node type, it will be
# serialized with its attributes.
if cmds.nodeType(obj_set) in RENDERER_NODE_TYPES:
self.log.info("- {} is {}".format(
obj_set, cmds.nodeType(obj_set)))
node_attrs = []
# serialize its attributes so they can be recreated on look
# load.
for attr in cmds.listAttr(obj_set):
# skip publishedNodeInfo attributes as they break
# getAttr() and we don't need them anyway
if attr.startswith("publishedNodeInfo"):
continue
# skip attribute types defined in the 'disabled_types' list
if cmds.getAttr("{}.{}".format(obj_set, attr), type=True) in disabled_types: # noqa
continue
node_attrs.append((
attr,
cmds.getAttr("{}.{}".format(obj_set, attr)),
cmds.getAttr(
"{}.{}".format(obj_set, attr), type=True)
))
for member in cmds.ls(
cmds.sets(obj_set, query=True), long=True):
member_data = self.collect_member_data(member,
instance_lookup)
if not member_data:
continue
# Add information of the node to the members list
sets[obj_set]["members"].append(member_data)
# Get all nodes of the current objectSet (shadingEngine)
for member in cmds.ls(cmds.sets(objset, query=True), long=True):
for member in cmds.ls(cmds.sets(obj_set, query=True), long=True):
member_data = self.collect_member_data(member,
instance_lookup)
if not member_data:
continue
# Add information of the node to the members list
sets[objset]["members"].append(member_data)
sets[obj_set]["members"].append(member_data)
# Remove sets that didn't have any members assigned in the end
# Thus the data will be limited to only what we need.
self.log.info("objset {}".format(sets[objset]))
if not sets[objset]["members"] or (not objset.endswith("SG")):
self.log.info("Removing redundant set information: "
"%s" % objset)
sets.pop(objset, None)
self.log.info("obj_set {}".format(sets[obj_set]))
if not sets[obj_set]["members"]:
self.log.info(
"Removing redundant set information: {}".format(obj_set))
sets.pop(obj_set, None)
self.log.info("Gathering attribute changes to instance members..")
attributes = self.collect_attributes_changed(instance)
# Store data on the instance
instance.data["lookData"] = {"attributes": attributes,
"relationships": sets}
instance.data["lookData"] = {
"attributes": attributes,
"relationships": sets
}
# Collect file nodes used by shading engines (if we have any)
files = list()
looksets = sets.keys()
shaderAttrs = [
"surfaceShader",
"volumeShader",
"displacementShader",
"aiSurfaceShader",
"aiVolumeShader"]
materials = list()
files = []
look_sets = sets.keys()
shader_attrs = [
"surfaceShader",
"volumeShader",
"displacementShader",
"aiSurfaceShader",
"aiVolumeShader"]
if look_sets:
materials = []
if looksets:
for look in looksets:
for at in shaderAttrs:
for look in look_sets:
for at in shader_attrs:
try:
con = cmds.listConnections("{}.{}".format(look, at))
except ValueError:
@ -289,10 +342,10 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.info("Found materials:\n{}".format(materials))
self.log.info("Found the following sets:\n{}".format(looksets))
self.log.info("Found the following sets:\n{}".format(look_sets))
# Get the entire node chain of the look sets
# history = cmds.listHistory(looksets)
history = list()
# history = cmds.listHistory(look_sets)
history = []
for material in materials:
history.extend(cmds.listHistory(material))
files = cmds.ls(history, type="file", long=True)
@ -313,7 +366,7 @@ class CollectLook(pyblish.api.InstancePlugin):
# Ensure unique shader sets
# Add shader sets to the instance for unify ID validation
instance.extend(shader for shader in looksets if shader
instance.extend(shader for shader in look_sets if shader
not in instance_lookup)
self.log.info("Collected look for %s" % instance)
@ -331,7 +384,7 @@ class CollectLook(pyblish.api.InstancePlugin):
dict
"""
sets = dict()
sets = {}
for node in instance:
related_sets = lib.get_related_sets(node)
if not related_sets:
@ -427,6 +480,11 @@ class CollectLook(pyblish.api.InstancePlugin):
"""
self.log.debug("processing: {}".format(node))
if cmds.nodeType(node) not in ["file", "aiImage"]:
self.log.error(
"Unsupported file node: {}".format(cmds.nodeType(node)))
raise AssertionError("Unsupported file node")
if cmds.nodeType(node) == 'file':
self.log.debug(" - file node")
attribute = "{}.fileTextureName".format(node)
@ -435,6 +493,7 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.debug("aiImage node")
attribute = "{}.filename".format(node)
computed_attribute = attribute
source = cmds.getAttr(attribute)
self.log.info(" - file source: {}".format(source))
color_space_attr = "{}.colorSpace".format(node)

View file

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
"""Maya look extractor."""
import os
import sys
import json
import copy
import tempfile
import contextlib
import subprocess
from collections import OrderedDict
from maya import cmds
from maya import cmds # noqa
import pyblish.api
import avalon.maya
@ -22,23 +23,38 @@ HARDLINK = 2
def find_paths_by_hash(texture_hash):
# Find the texture hash key in the dictionary and all paths that
# originate from it.
"""Find the texture hash key in the dictionary.
Returns all paths that originate from it.
Args:
texture_hash (str): Hash of the texture.
Return:
list: Paths of published textures matching the hash, if any.
"""
key = "data.sourceHashes.{0}".format(texture_hash)
return io.distinct(key, {"type": "version"})
def maketx(source, destination, *args):
"""Make .tx using maketx with some default settings.
"""Make `.tx` using `maketx` with some default settings.
The settings are based on the defaults used by Arnold's
txManager in the scene.
This function requires the `maketx` executable to be
on the `PATH`.
Args:
source (str): Path to source file.
destination (str): Writing destination path.
"""
*args: Additional arguments for `maketx`.
Returns:
str: Output of `maketx` command.
"""
cmd = [
"maketx",
"-v", # verbose
@ -56,7 +72,7 @@ def maketx(source, destination, *args):
cmd = " ".join(cmd)
CREATE_NO_WINDOW = 0x08000000
CREATE_NO_WINDOW = 0x08000000 # noqa
kwargs = dict(args=cmd, stderr=subprocess.STDOUT)
if sys.platform == "win32":
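
A hedged sketch of invoking the `maketx` wrapper above (paths are hypothetical; the `maketx` binary is assumed to be on PATH, as the docstring notes):

```python
# Hypothetical conversion of a source texture into a linearized .tx,
# using the same flag string built later in _process_texture.
output = maketx(
    "/projects/demo/sourceimages/diffuse.png",  # source (hypothetical)
    "/tmp/staging/resources/diffuse.tx",        # destination (hypothetical)
    "--colorconvert sRGB linear",               # extra argument passed through
)
```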
@ -118,12 +134,58 @@ class ExtractLook(openpype.api.Extractor):
hosts = ["maya"]
families = ["look"]
order = pyblish.api.ExtractorOrder + 0.2
scene_type = "ma"
@staticmethod
def get_renderer_name():
"""Get renderer name from Maya.
Returns:
str: Renderer name.
"""
renderer = cmds.getAttr(
"defaultRenderGlobals.currentRenderer"
).lower()
# handle various renderman names
if renderer.startswith("renderman"):
renderer = "renderman"
return renderer
def get_maya_scene_type(self, instance):
"""Get Maya scene type from settings.
Args:
instance (pyblish.api.Instance): Instance with collected
project settings.
"""
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
if ext_mapping:
self.log.info("Looking in settings for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except KeyError:
# no preset found
pass
def process(self, instance):
"""Plugin entry point.
Args:
instance: Instance to process.
"""
# Define extract output file path
dir_path = self.staging_dir(instance)
maya_fname = "{0}.ma".format(instance.name)
maya_fname = "{0}.{1}".format(instance.name, self.scene_type)
json_fname = "{0}.json".format(instance.name)
# Make texture dump folder
@ -148,7 +210,7 @@ class ExtractLook(openpype.api.Extractor):
# Collect all unique files used in the resources
files = set()
files_metadata = dict()
files_metadata = {}
for resource in resources:
# Preserve color space values (force value after filepath change)
# This will also trigger in the same order at end of context to
@ -162,35 +224,33 @@ class ExtractLook(openpype.api.Extractor):
# files.update(os.path.normpath(f))
# Process the resource files
transfers = list()
hardlinks = list()
hashes = dict()
forceCopy = instance.data.get("forceCopy", False)
transfers = []
hardlinks = []
hashes = {}
force_copy = instance.data.get("forceCopy", False)
self.log.info(files)
for filepath in files_metadata:
cspace = files_metadata[filepath]["color_space"]
linearise = False
if do_maketx:
if cspace == "sRGB":
linearise = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
linearize = False
if do_maketx and files_metadata[filepath]["color_space"] == "sRGB": # noqa: E501
linearize = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
source, mode, hash = self._process_texture(
source, mode, texture_hash = self._process_texture(
filepath,
do_maketx,
staging=dir_path,
linearise=linearise,
force=forceCopy
linearize=linearize,
force=force_copy
)
destination = self.resource_destination(instance,
source,
do_maketx)
# Force copy is specified.
if forceCopy:
if force_copy:
mode = COPY
if mode == COPY:
@ -202,10 +262,10 @@ class ExtractLook(openpype.api.Extractor):
# Store the hashes from hash to destination to include in the
# database
hashes[hash] = destination
hashes[texture_hash] = destination
# Remap the resources to the destination path (change node attributes)
destinations = dict()
destinations = {}
remap = OrderedDict() # needs to be ordered, see color space values
for resource in resources:
source = os.path.normpath(resource["source"])
@ -222,7 +282,7 @@ class ExtractLook(openpype.api.Extractor):
color_space_attr = resource["node"] + ".colorSpace"
color_space = cmds.getAttr(color_space_attr)
if files_metadata[source]["color_space"] == "raw":
# set colorpsace to raw if we linearized it
# set color space to raw if we linearized it
color_space = "Raw"
# Remap file node filename to destination
attr = resource["attribute"]
@ -267,11 +327,11 @@ class ExtractLook(openpype.api.Extractor):
json.dump(data, f)
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"] = []
if "hardlinks" not in instance.data:
instance.data["hardlinks"] = list()
instance.data["hardlinks"] = []
if "transfers" not in instance.data:
instance.data["transfers"] = list()
instance.data["transfers"] = []
instance.data["files"].append(maya_fname)
instance.data["files"].append(json_fname)
@ -311,14 +371,26 @@ class ExtractLook(openpype.api.Extractor):
maya_path))
def resource_destination(self, instance, filepath, do_maketx):
anatomy = instance.context.data["anatomy"]
"""Get resource destination path.
This is a utility function to change the path if the resource file
name is changed by an external tool like `maketx`.
Args:
instance: Current Instance.
filepath (str): Resource path
do_maketx (bool): Flag if resource is processed by `maketx`.
Returns:
str: Path to resource file
"""
resources_dir = instance.data["resourcesDir"]
# Compute destination location
basename, ext = os.path.splitext(os.path.basename(filepath))
# If maketx then the texture will always end with .tx
# If `maketx` then the texture will always end with .tx
if do_maketx:
ext = ".tx"
@ -326,7 +398,7 @@ class ExtractLook(openpype.api.Extractor):
resources_dir, basename + ext
)
def _process_texture(self, filepath, do_maketx, staging, linearise, force):
def _process_texture(self, filepath, do_maketx, staging, linearize, force):
"""Process a single texture file on disk for publishing.
This will:
1. Check whether it's already published, if so it will do hardlink
@ -363,7 +435,7 @@ class ExtractLook(openpype.api.Extractor):
# Produce .tx file in staging if source file is not .tx
converted = os.path.join(staging, "resources", fname + ".tx")
if linearise:
if linearize:
self.log.info("tx: converting sRGB -> linear")
colorconvert = "--colorconvert sRGB linear"
else:

View file

@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
"""Redshift Proxy extractor."""
import os
import avalon.maya
import openpype.api
from maya import cmds
class ExtractRedshiftProxy(openpype.api.Extractor):
"""Extract the content of the instance to a redshift proxy file."""
label = "Redshift Proxy (.rs)"
hosts = ["maya"]
families = ["redshiftproxy"]
def process(self, instance):
"""Extractor entry point."""
staging_dir = self.staging_dir(instance)
file_name = "{}.rs".format(instance.name)
file_path = os.path.join(staging_dir, file_name)
anim_on = instance.data["animation"]
rs_options = "exportConnectivity=0;enableCompression=1;keepUnused=0;"
repr_files = file_name
if not anim_on:
# Remove animation information because it is not required for
# non-animated subsets
instance.data.pop("proxyFrameStart", None)
instance.data.pop("proxyFrameEnd", None)
else:
start_frame = instance.data["proxyFrameStart"]
end_frame = instance.data["proxyFrameEnd"]
rs_options = "{}startFrame={};endFrame={};frameStep={};".format(
rs_options, start_frame,
end_frame, instance.data["proxyFrameStep"]
)
root, ext = os.path.splitext(file_path)
# Padding is taken from number of digits of the end_frame.
# Not sure where Redshift takes it from.
repr_files = [
"{}.{}{}".format(root, str(frame).rjust(4, "0"), ext) # noqa: E501
for frame in range(
int(start_frame),
int(end_frame) + 1,
int(instance.data["proxyFrameStep"]),
)]
# vertex_colors = instance.data.get("vertexColors", False)
# Write out rs file
self.log.info("Writing: '%s'" % file_path)
with avalon.maya.maintained_selection():
cmds.select(instance.data["setMembers"], noExpand=True)
cmds.file(file_path,
pr=False,
force=True,
type="Redshift Proxy",
exportSelected=True,
options=rs_options)
if "representations" not in instance.data:
instance.data["representations"] = []
self.log.debug("Files: {}".format(repr_files))
representation = {
'name': 'rs',
'ext': 'rs',
'files': repr_files,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s"
% (instance.name, staging_dir))
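
To make the animated-proxy naming above concrete, a small sketch with hypothetical values; Redshift writes one `.rs` file per frame with 4-digit zero padding:

```python
# Hypothetical animated export: frames 1001-1003, step 1.
root, ext = "charA_proxy", ".rs"
repr_files = [
    "{}.{}{}".format(root, str(frame).rjust(4, "0"), ext)
    for frame in range(1001, 1003 + 1, 1)
]
# -> ["charA_proxy.1001.rs", "charA_proxy.1002.rs", "charA_proxy.1003.rs"]
```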

View file

@ -5,7 +5,7 @@ import re
import avalon.maya
import openpype.api
from openpype.hosts.maya.render_setup_tools import export_in_rs_layer
from openpype.hosts.maya.api.render_setup_tools import export_in_rs_layer
from maya import cmds

View file

@ -73,8 +73,10 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# check if any objectSets are not present in the relationships
missing_sets = [s for s in sets if s not in relationships]
if missing_sets:
for set in missing_sets:
if '_SET' not in set:
for missing_set in missing_sets:
cls.log.debug(missing_set)
if '_SET' not in missing_set:
# A set of this node is not coming along, this is wrong!
cls.log.error("Missing sets '{}' for node "
"'{}'".format(missing_sets, node))
@ -82,8 +84,8 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
continue
# Ensure the node is in the sets that are collected
for shaderset, data in relationships.items():
if shaderset not in sets:
for shader_set, data in relationships.items():
if shader_set not in sets:
# no need to check for a set if the node
# isn't in it anyway
continue
@ -94,7 +96,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# The node is not found in the collected set
# relationships
cls.log.error("Missing '{}' in collected set node "
"'{}'".format(node, shaderset))
"'{}'".format(node, shader_set))
invalid.append(node)
continue

View file

@ -8,7 +8,7 @@ import openpype.api
class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin):
"""Validate if mesh is made of triangles for Unreal Engine"""
order = openpype.api.ValidateMeshOder
order = openpype.api.ValidateMeshOrder
hosts = ["maya"]
families = ["unrealStaticMesh"]
category = "geometry"

View file

@ -390,16 +390,19 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
"inputName": input.name()})
prev_node = nuke.createNode(
"Input", "name {}".format(input.name()))
prev_node.hideControlPanel()
else:
# generic input node connected to nothing
prev_node = nuke.createNode(
"Input", "name {}".format("rgba"))
prev_node.hideControlPanel()
# creating pre-write nodes `prenodes`
if prenodes:
for name, klass, properties, set_output_to in prenodes:
# create node
now_node = nuke.createNode(klass, "name {}".format(name))
now_node.hideControlPanel()
# add data to knob
for k, v in properties:
@ -421,17 +424,21 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
for i, node_name in enumerate(set_output_to):
input_node = nuke.createNode(
"Input", "name {}".format(node_name))
input_node.hideControlPanel()
connections.append({
"node": nuke.toNode(node_name),
"inputName": node_name})
now_node.setInput(1, input_node)
elif isinstance(set_output_to, str):
input_node = nuke.createNode(
"Input", "name {}".format(node_name))
input_node.hideControlPanel()
connections.append({
"node": nuke.toNode(set_output_to),
"inputName": set_output_to})
now_node.setInput(0, input_node)
else:
now_node.setInput(0, prev_node)
@ -443,7 +450,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
"inside_{}".format(name),
**_data
)
write_node.hideControlPanel()
# connect to previous node
now_node.setInput(0, prev_node)
@ -451,6 +458,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
prev_node = now_node
now_node = nuke.createNode("Output", "name Output1")
now_node.hideControlPanel()
# connect to previous node
now_node.setInput(0, prev_node)
@ -664,8 +672,7 @@ class WorkfileSettings(object):
]
erased_viewers = []
for v in [n for n in self._nodes
if "Viewer" in n.Class()]:
for v in [n for n in nuke.allNodes(filter="Viewer")]:
v['viewerProcess'].setValue(str(viewer_dict["viewerProcess"]))
if str(viewer_dict["viewerProcess"]) \
not in v['viewerProcess'].value():

View file

@ -55,11 +55,6 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
families_ak = avalon_knob_data.get("families", [])
families = list()
if families_ak:
families.append(families_ak.lower())
families.append(family)
# skip disabled nodes, but exclude backdrops from this test
if ("nukenodes" not in family) and (node["disable"].value()):
continue
@ -81,36 +76,33 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if "write" in families_ak:
if "write" in families_ak.lower():
target = node["render"].value()
if target == "Use existing frames":
# Local rendering
self.log.info("flagged for no render")
families.append(family)
elif target == "Local":
# Local rendering
self.log.info("flagged for local render")
families.append("{}.local".format(family))
family = families_ak.lower()
elif target == "On farm":
# Farm rendering
self.log.info("flagged for farm render")
instance.data["transfer"] = False
families.append("{}.farm".format(family))
# shuffle family to `write` as it is the main family
# this will be changed later on in process
if "render" in families:
families.remove("render")
family = "write"
elif "prerender" in families:
families.remove("prerender")
family = "write"
family = families_ak.lower()
node.begin()
for i in nuke.allNodes():
instance.append(i)
node.end()
if not families and families_ak and family not in [
"render", "prerender"]:
families.append(families_ak.lower())
self.log.debug("__ family: `{}`".format(family))
self.log.debug("__ families: `{}`".format(families))
# Get format
@ -124,7 +116,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
anlib.add_publish_knob(node)
# sync workfile version
if not next((f for f in families
_families_test = [family] + families
self.log.debug("__ _families_test: `{}`".format(_families_test))
if not next((f for f in _families_test
if "prerender" in f),
None) and self.sync_workfile_version:
# get version to instance for integration

View file

@ -1,4 +1,5 @@
import os
import re
import nuke
import pyblish.api
import openpype.api as pype
@ -14,11 +15,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
hosts = ["nuke", "nukeassist"]
families = ["write"]
# preset attributes
sync_workfile_version = True
def process(self, instance):
families = instance.data["families"]
_families_test = [instance.data["family"]] + instance.data["families"]
node = None
for x in instance:
@ -63,7 +61,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
int(last_frame)
)
if [fm for fm in families
if [fm for fm in _families_test
if fm in ["render", "prerender"]]:
if "representations" not in instance.data:
instance.data["representations"] = list()
@ -91,9 +89,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
collected_frames_len))
# this will only run if slate frame is not already
# rendered from previous publishes
if "slate" in instance.data["families"] \
if "slate" in _families_test \
and (frame_length == collected_frames_len) \
and ("prerender" not in instance.data["families"]):
and ("prerender" not in _families_test):
frame_slate_str = "%0{}d".format(
len(str(last_frame))) % (first_frame - 1)
slate_frame = collected_frames[0].replace(
@ -107,10 +105,17 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
self.log.debug("couldn't collect frames: {}".format(label))
# Add version data to instance
colorspace = node["colorspace"].value()
# remove default part of the string
if "default (" in colorspace:
colorspace = re.sub(r"default.\(|\)", "", colorspace)
self.log.debug("colorspace: `{}`".format(colorspace))
version_data = {
"families": [f.replace(".local", "").replace(".farm", "")
for f in families if "write" not in f],
"colorspace": node["colorspace"].value(),
for f in _families_test if "write" not in f],
"colorspace": colorspace
}
group_node = [x for x in instance if x.Class() == "Group"][0]
@ -135,13 +140,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
"outputType": output_type,
"families": families,
"colorspace": node["colorspace"].value(),
"colorspace": colorspace,
"deadlineChunkSize": deadlineChunkSize,
"deadlinePriority": deadlinePriority
})
if "prerender" in families:
if "prerender" in _families_test:
instance.data.update({
"family": "prerender",
"families": []
@ -166,6 +170,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"filename": api.get_representation_path(repre_doc)
}]
self.log.debug("families: {}".format(families))
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -5,23 +5,50 @@ import clique
@pyblish.api.log
class RepairCollectionAction(pyblish.api.Action):
label = "Repair"
class RepairActionBase(pyblish.api.Action):
on = "failed"
icon = "wrench"
@staticmethod
def get_instance(context, plugin):
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
return pyblish.api.instances_by_plugin(failed, plugin)
def repair_knob(self, instances, state):
for instance in instances:
files_remove = [os.path.join(instance.data["outputDir"], f)
for r in instance.data.get("representations", [])
for f in r.get("files", [])
]
self.log.info("Files to be removed: {}".format(files_remove))
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
instance[0]["render"].setValue(state)
self.log.info("Rendering toggled to `{}`".format(state))
class RepairCollectionActionToLocal(RepairActionBase):
label = "Repair > rerender with `Local` machine"
def process(self, context, plugin):
self.log.info(context[0][0])
files_remove = [os.path.join(context[0].data["outputDir"], f)
for r in context[0].data.get("representations", [])
for f in r.get("files", [])
]
self.log.info("Files to be removed: {}".format(files_remove))
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
context[0][0]["render"].setValue(True)
self.log.info("Rendering toggled ON")
instances = self.get_instance(context, plugin)
self.repair_knob(instances, "Local")
class RepairCollectionActionToFarm(RepairActionBase):
label = "Repair > rerender `On farm` with remote machines"
def process(self, context, plugin):
instances = self.get_instance(context, plugin)
self.repair_knob(instances, "On farm")
class ValidateRenderedFrames(pyblish.api.InstancePlugin):
@ -32,26 +59,28 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
label = "Validate rendered frame"
hosts = ["nuke", "nukestudio"]
actions = [RepairCollectionAction]
actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm]
def process(self, instance):
for repre in instance.data.get('representations'):
for repre in instance.data["representations"]:
if not repre.get('files'):
if not repre.get("files"):
msg = ("no frames were collected, "
"you need to render them")
self.log.error(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])
self.log.info('collections: {}'.format(str(collections)))
self.log.info('remainder: {}'.format(str(remainder)))
self.log.info("collections: {}".format(str(collections)))
self.log.info("remainder: {}".format(str(remainder)))
collection = collections[0]
frame_length = int(
instance.data["frameEndHandle"] - instance.data["frameStartHandle"] + 1
instance.data["frameEndHandle"]
- instance.data["frameStartHandle"] + 1
)
if frame_length != 1:
@ -65,15 +94,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
self.log.error(msg)
raise ValidationException(msg)
# if len(remainder) != 0:
# msg = "There are some extra files in folder"
# self.log.error(msg)
# raise ValidationException(msg)
collected_frames_len = int(len(collection.indexes))
self.log.info('frame_length: {}'.format(frame_length))
self.log.info("frame_length: {}".format(frame_length))
self.log.info(
'len(collection.indexes): {}'.format(collected_frames_len)
"len(collection.indexes): {}".format(collected_frames_len)
)
if ("slate" in instance.data["families"]) \
@ -84,6 +108,6 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
"{} missing frames. Use repair to render all frames"
).format(__name__)
instance.data['collection'] = collection
instance.data["collection"] = collection
return

View file

@ -78,6 +78,14 @@ class CollectInstances(pyblish.api.ContextPlugin):
if instance is None:
continue
any_visible = False
for layer in instance.data["layers"]:
if layer["visible"]:
any_visible = True
break
instance.data["publish"] = any_visible
instance.data["frameStart"] = context.data["sceneMarkIn"] + 1
instance.data["frameEnd"] = context.data["sceneMarkOut"] + 1
@ -108,7 +116,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
group_id = instance_data["group_id"]
group_layers = []
for layer in layers_data:
if layer["group_id"] == group_id and layer["visible"]:
if layer["group_id"] == group_id:
group_layers.append(layer)
if not group_layers:

View file

@ -57,7 +57,10 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
# Collect context from workfile metadata
self.log.info("Collecting workfile context")
workfile_context = pipeline.get_current_workfile_context()
# Store workfile context to pyblish context
context.data["workfile_context"] = workfile_context
if workfile_context:
# Change current context with context from workfile
key_map = (
@ -67,16 +70,27 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
for env_key, key in key_map:
avalon.api.Session[env_key] = workfile_context[key]
os.environ[env_key] = workfile_context[key]
self.log.info("Context changed to: {}".format(workfile_context))
asset_name = workfile_context["asset"]
task_name = workfile_context["task"]
else:
asset_name = current_context["asset"]
task_name = current_context["task"]
# Handle older workfiles or workfiles without metadata
self.log.warning(
self.log.warning((
"Workfile does not contain information about context."
" Using current Session context."
)
workfile_context = current_context.copy()
))
context.data["workfile_context"] = workfile_context
self.log.info("Context changed to: {}".format(workfile_context))
# Store context asset name
context.data["asset"] = asset_name
self.log.info(
"Context is set to Asset: \"{}\" and Task: \"{}\"".format(
asset_name, task_name
)
)
# Collect instances
self.log.info("Collecting instance data from workfile")

View file

@ -0,0 +1,55 @@
import pyblish.api
from avalon.tvpaint import pipeline
class FixAssetNames(pyblish.api.Action):
"""Repair the asset names.
Change instance metadata in the workfile.
"""
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
context_asset_name = context.data["asset"]
old_instance_items = pipeline.list_instances()
new_instance_items = []
for instance_item in old_instance_items:
instance_asset_name = instance_item.get("asset")
if (
instance_asset_name
and instance_asset_name != context_asset_name
):
instance_item["asset"] = context_asset_name
new_instance_items.append(instance_item)
pipeline._write_instances(new_instance_items)
class ValidateMissingLayers(pyblish.api.ContextPlugin):
"""Validate assset name present on instance.
Asset name on instance should be the same as context's.
"""
label = "Validate Asset Names"
order = pyblish.api.ValidatorOrder
hosts = ["tvpaint"]
actions = [FixAssetNames]
def process(self, context):
context_asset_name = context.data["asset"]
for instance in context:
asset_name = instance.data.get("asset")
if asset_name and asset_name == context_asset_name:
continue
instance_label = (
instance.data.get("label") or instance.data["name"]
)
raise AssertionError((
"Different asset name on instance then context's."
" Instance \"{}\" has asset name: \"{}\""
" Context asset name is: \"{}\""
).format(instance_label, asset_name, context_asset_name))

View file

@ -13,7 +13,15 @@ class ValidateWorkfileProjectName(pyblish.api.ContextPlugin):
order = pyblish.api.ValidatorOrder
def process(self, context):
workfile_context = context.data["workfile_context"]
workfile_context = context.data.get("workfile_context")
# If workfile context is missing, the project is guaranteed to match
# the `AVALON_PROJECT` value
if not workfile_context:
self.log.info(
"Workfile context (\"workfile_context\") is not filled."
)
return
workfile_project_name = workfile_context["project"]
env_project_name = os.environ["AVALON_PROJECT"]
if workfile_project_name == env_project_name:

View file

@ -24,7 +24,7 @@ class UnrealPrelaunchHook(PreLaunchHook):
asset_name = self.data["asset_name"]
task_name = self.data["task_name"]
workdir = self.launch_context.env["AVALON_WORKDIR"]
engine_version = self.app_name.split("_")[-1].replace("-", ".")
engine_version = self.app_name.split("/")[-1].replace("-", ".")
unreal_project_name = f"{asset_name}_{task_name}"
# Unreal is sensitive about project names longer than 20 chars

View file

@ -6,11 +6,21 @@ import sys
import os
import site
# add Python version specific vendor folder
site.addsitedir(
os.path.join(
os.getenv("OPENPYPE_REPOS_ROOT", ""),
"vendor", "python", "python_{}".format(sys.version[0])))
# Add Python version specific vendor folder
python_version_dir = os.path.join(
os.getenv("OPENPYPE_REPOS_ROOT", ""),
"openpype", "vendor", "python", "python_{}".format(sys.version[0])
)
# Prepend path to sys.path
sys.path.insert(0, python_version_dir)
site.addsitedir(python_version_dir)
from .env_tools import (
env_value_to_bool,
get_paths_from_environ,
get_global_environments
)
from .terminal import Terminal
from .execute import (
@ -33,10 +43,11 @@ from .anatomy import (
from .config import get_datetime_data
from .env_tools import (
env_value_to_bool,
get_paths_from_environ,
get_global_environments
from .vendor_bin_utils import (
get_vendor_bin_path,
get_oiio_tools_path,
get_ffmpeg_tool_path,
ffprobe_streams
)
from .python_module_tools import (
@ -116,11 +127,6 @@ from .path_tools import (
get_last_version_from_path
)
from .ffmpeg_utils import (
get_ffmpeg_tool_path,
ffprobe_streams
)
from .editorial import (
is_overlapping_otio_ranges,
otio_range_to_frame_range,
@ -143,6 +149,11 @@ __all__ = [
"get_paths_from_environ",
"get_global_environments",
"get_vendor_bin_path",
"get_oiio_tools_path",
"get_ffmpeg_tool_path",
"ffprobe_streams",
"modules_from_path",
"recursive_bases_from_class",
"classes_from_module",
@ -199,9 +210,6 @@ __all__ = [
"get_version_from_path",
"get_last_version_from_path",
"ffprobe_streams",
"get_ffmpeg_tool_path",
"terminal",
"merge_dict",

View file

@ -179,6 +179,7 @@ class Application:
if group.enabled:
enabled = data.get("enabled", True)
self.enabled = enabled
self.use_python_2 = data["use_python_2"]
self.label = data.get("variant_label") or name
self.full_name = "/".join((group.name, name))

View file

@ -1,5 +1,4 @@
import os
from openpype.settings import get_environments
def env_value_to_bool(env_key=None, value=None, default=False):
@ -89,6 +88,7 @@ def get_global_environments(env=None):
"""
import acre
from openpype.modules import ModulesManager
from openpype.settings import get_environments
if env is None:
env = {}

View file

@ -9,6 +9,7 @@ import tempfile
from .execute import run_subprocess
from .profiles_filtering import filter_profiles
from .vendor_bin_utils import get_oiio_tools_path
from openpype.settings import get_project_settings
@ -127,7 +128,7 @@ def filter_pyblish_plugins(plugins):
plugin_kind = file.split(os.path.sep)[-2:-1][0]
# TODO: change after all plugins are moved one level up
if host_from_file == "pype":
if host_from_file == "openpype":
host_from_file = "global"
try:
@ -235,7 +236,7 @@ def oiio_supported():
Returns:
(bool)
"""
oiio_path = os.getenv("OPENPYPE_OIIO_PATH", "")
oiio_path = get_oiio_tools_path()
if not oiio_path or not os.path.exists(oiio_path):
log.debug("OIIOTool is not configured or not present at {}".
format(oiio_path))
@ -269,7 +270,7 @@ def decompress(target_dir, file_url,
(int(input_frame_end) > int(input_frame_start))
oiio_cmd = []
oiio_cmd.append(os.getenv("OPENPYPE_OIIO_PATH"))
oiio_cmd.append(get_oiio_tools_path())
oiio_cmd.append("--compression none")
@ -328,7 +329,7 @@ def should_decompress(file_url):
"""
if oiio_supported():
output = run_subprocess([
os.getenv("OPENPYPE_OIIO_PATH"),
get_oiio_tools_path(),
"--info", "-v", file_url])
return "compression: \"dwaa\"" in output or \
"compression: \"dwab\"" in output

View file

@ -1,33 +1,60 @@
import os
import logging
import json
import platform
import subprocess
from . import get_paths_from_environ
log = logging.getLogger("FFmpeg utils")
def get_ffmpeg_tool_path(tool="ffmpeg"):
"""Find path to ffmpeg tool in FFMPEG_PATH paths.
def get_vendor_bin_path(bin_app):
"""Path to OpenPype vendorized binaries.
Function looks for tool in paths set in FFMPEG_PATH environment. If tool
exists then returns it's full path.
Vendorized executables are expected in a specific hierarchy inside
the build or in the code source.
"{OPENPYPE_ROOT}/vendor/bin/{name of vendorized app}/{platform}"
Args:
tool (string): tool name
bin_app (str): Name of vendorized application.
Returns:
(str): tool name itself when tool path was not found. (FFmpeg path
may be set in PATH environment variable)
str: Path to vendorized binaries folder.
"""
dir_paths = get_paths_from_environ("FFMPEG_PATH")
for dir_path in dir_paths:
for file_name in os.listdir(dir_path):
base, _ext = os.path.splitext(file_name)
if base.lower() == tool.lower():
return os.path.join(dir_path, tool)
return tool
return os.path.join(
os.environ["OPENPYPE_ROOT"],
"vendor",
"bin",
bin_app,
platform.system().lower()
)
def get_oiio_tools_path(tool="oiiotool"):
"""Path to vendorized OpenImageIO tool executables.
Args:
tool (string): Tool name (oiiotool, maketx, ...).
Default is "oiiotool".
"""
oiio_dir = get_vendor_bin_path("oiio")
return os.path.join(oiio_dir, tool)
def get_ffmpeg_tool_path(tool="ffmpeg"):
"""Path to vendorized FFmpeg executable.
Args:
tool (string): Tool name (ffmpeg, ffprobe, ...).
Default is "ffmpeg".
Returns:
str: Full path to ffmpeg executable.
"""
ffmpeg_dir = get_vendor_bin_path("ffmpeg")
if platform.system().lower() == "windows":
ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
return os.path.join(ffmpeg_dir, tool)
def ffprobe_streams(path_to_file, logger=None):
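
A minimal sketch of the paths the helpers above compose, assuming `OPENPYPE_ROOT` points at a Linux install in a hypothetical location:

```python
import os

os.environ["OPENPYPE_ROOT"] = "/opt/openpype"  # hypothetical install root

print(get_oiio_tools_path("maketx"))
# -> /opt/openpype/vendor/bin/oiio/linux/maketx
print(get_ffmpeg_tool_path("ffprobe"))
# -> /opt/openpype/vendor/bin/ffmpeg/linux/ffprobe
# (on Windows an extra "bin" folder is appended for ffmpeg)
```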

View file

@ -34,7 +34,12 @@ class ClockifyAPI:
self.request_counter = 0
self.request_time = time.time()
self.secure_registry = OpenPypeSecureRegistry("clockify")
self._secure_registry = None
@property
def secure_registry(self):
if self._secure_registry is None:
self._secure_registry = OpenPypeSecureRegistry("clockify")
return self._secure_registry
@property
def headers(self):

View file

@ -42,7 +42,17 @@ class FtrackModule(
ftrack_settings = settings[self.name]
self.enabled = ftrack_settings["enabled"]
self.ftrack_url = ftrack_settings["ftrack_server"].strip("/ ")
# Add https scheme if missing
ftrack_url = ftrack_settings["ftrack_server"].strip("/ ")
if ftrack_url:
if "http" not in ftrack_url:
ftrack_url = "https://" + ftrack_url
# Check if "ftrack.app" is part os url
if "ftrackapp.com" not in ftrack_url:
ftrack_url = ftrack_url + ".ftrackapp.com"
self.ftrack_url = ftrack_url
current_dir = os.path.dirname(os.path.abspath(__file__))
server_event_handlers_paths = [
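
Restated as a standalone function, the URL normalization above behaves roughly like this (example values are hypothetical):

```python
def normalize_ftrack_url(ftrack_url):
    """Mirror of the settings handling above: add the https scheme and
    the ftrackapp.com domain when only a bare studio name is given."""
    ftrack_url = ftrack_url.strip("/ ")
    if ftrack_url:
        if "http" not in ftrack_url:
            ftrack_url = "https://" + ftrack_url
        if "ftrackapp.com" not in ftrack_url:
            ftrack_url += ".ftrackapp.com"
    return ftrack_url

# normalize_ftrack_url("mystudio") -> "https://mystudio.ftrackapp.com"
# normalize_ftrack_url("https://mystudio.ftrackapp.com") -> unchanged
```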

View file

@ -8,10 +8,13 @@ class PrePython2Support(PreLaunchHook):
Path to vendor modules is added to the beginning of PYTHONPATH.
"""
# More granular filtering will be needed in the future
app_groups = ["maya", "nuke", "nukex", "hiero", "nukestudio", "unreal"]
def execute(self):
if not self.application.use_python_2:
return
self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.")
# Prepare vendor dir path
python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor")

View file

@ -15,7 +15,10 @@ API_KEY_KEY = "api_key"
def get_ftrack_hostname(ftrack_server=None):
if not ftrack_server:
ftrack_server = os.environ["FTRACK_SERVER"]
ftrack_server = os.environ.get("FTRACK_SERVER")
if not ftrack_server:
return None
if "//" not in ftrack_server:
ftrack_server = "//" + ftrack_server
@ -29,17 +32,24 @@ def _get_ftrack_secure_key(hostname, key):
def get_credentials(ftrack_server=None):
output = {
USERNAME_KEY: None,
API_KEY_KEY: None
}
hostname = get_ftrack_hostname(ftrack_server)
if not hostname:
return output
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
return {
USERNAME_KEY: username_registry.get_item(USERNAME_KEY, None),
API_KEY_KEY: api_key_registry.get_item(API_KEY_KEY, None)
}
output[USERNAME_KEY] = username_registry.get_item(USERNAME_KEY, None)
output[API_KEY_KEY] = api_key_registry.get_item(API_KEY_KEY, None)
return output
def save_credentials(username, api_key, ftrack_server=None):
@ -77,9 +87,9 @@ def clear_credentials(ftrack_server=None):
def check_credentials(username, api_key, ftrack_server=None):
if not ftrack_server:
ftrack_server = os.environ["FTRACK_SERVER"]
ftrack_server = os.environ.get("FTRACK_SERVER")
if not username or not api_key:
if not ftrack_server or not username or not api_key:
return False
try:

View file

@ -289,12 +289,6 @@ class FtrackTrayWrapper:
parent_menu.addMenu(tray_menu)
def tray_start(self):
self.validate()
def tray_exit(self):
self.stop_action_server()
# Definition of visibility of each menu actions
def set_menu_visibility(self):
self.tray_server_menu.menuAction().setVisible(self.bool_logged)

View file

@ -134,11 +134,11 @@ class CredentialsDialog(QtWidgets.QDialog):
def fill_ftrack_url(self):
url = os.getenv("FTRACK_SERVER")
if url == self.ftsite_input.text():
checked_url = self.check_url(url)
if checked_url == self.ftsite_input.text():
return
checked_url = self.check_url(url)
self.ftsite_input.setText(checked_url or "")
self.ftsite_input.setText(checked_url or "< Not set >")
enabled = bool(checked_url)
@ -147,7 +147,15 @@ class CredentialsDialog(QtWidgets.QDialog):
self.api_input.setEnabled(enabled)
self.user_input.setEnabled(enabled)
self.ftsite_input.setEnabled(enabled)
if not url:
self.btn_advanced.hide()
self.btn_simple.hide()
self.btn_ftrack_login.hide()
self.btn_login.hide()
self.note_label.hide()
self.api_input.hide()
self.user_input.hide()
def set_advanced_mode(self, is_advanced):
self._in_advance_mode = is_advanced
@ -293,10 +301,9 @@ class CredentialsDialog(QtWidgets.QDialog):
url = url.strip("/ ")
if not url:
self.set_error((
"You need to specify a valid server URL, "
"for example https://server-name.ftrackapp.com"
))
self.set_error(
"Ftrack URL is not defined in settings!"
)
return
if "http" not in url:

View file

@ -1,3 +1,4 @@
import platform
import collections
from abc import ABCMeta, abstractmethod
@ -40,7 +41,12 @@ class IdleManager(PypeModule, ITrayService):
name = "idle_manager"
def initialize(self, module_settings):
self.enabled = True
enabled = True
# Ignore on macOS
# - pynput needs root permissions and access enabled for the application
if platform.system().lower() == "darwin":
enabled = False
self.enabled = enabled
self.time_callbacks = collections.defaultdict(list)
self.idle_thread = None

View file

@ -2,12 +2,18 @@ import os
import re
import copy
import json
import pyblish.api
import clique
import pyblish.api
import openpype.api
import openpype.lib
from openpype.lib import should_decompress, \
get_decompress_dir, decompress
from openpype.lib import (
get_ffmpeg_tool_path,
ffprobe_streams,
should_decompress,
get_decompress_dir,
decompress
)
class ExtractReview(pyblish.api.InstancePlugin):
@ -43,17 +49,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
supported_exts = image_exts + video_exts
# FFmpeg tools paths
ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
# Preset attributes
profiles = None
# Legacy attributes
outputs = {}
ext_filter = []
to_width = 1920
to_height = 1080
def process(self, instance):
self.log.debug(instance.data["representations"])
# Skip review when requested.
@ -72,10 +72,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
).format(instance_label))
return
# Use legacy processing when `profiles` is not set.
if self.profiles is None:
return self.legacy_process(instance)
# Run processing
self.main_process(instance)
@ -726,7 +722,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
# NOTE Skipped using instance's resolution
full_input_path_single_file = temp_data["full_input_path_single_file"]
input_data = openpype.lib.ffprobe_streams(
input_data = ffprobe_streams(
full_input_path_single_file, self.log
)[0]
input_width = int(input_data["width"])
@ -1253,438 +1249,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
return filtered_outputs
def legacy_process(self, instance):
self.log.warning("Legacy review presets are used.")
output_profiles = self.outputs or {}
inst_data = instance.data
context_data = instance.context.data
fps = float(inst_data.get("fps"))
frame_start = inst_data.get("frameStart")
frame_end = inst_data.get("frameEnd")
handle_start = inst_data.get("handleStart",
context_data.get("handleStart"))
handle_end = inst_data.get("handleEnd",
context_data.get("handleEnd"))
pixel_aspect = inst_data.get("pixelAspect", 1)
resolution_width = inst_data.get("resolutionWidth", self.to_width)
resolution_height = inst_data.get("resolutionHeight", self.to_height)
self.log.debug("Families In: `{}`".format(inst_data["families"]))
self.log.debug("__ frame_start: {}".format(frame_start))
self.log.debug("__ frame_end: {}".format(frame_end))
self.log.debug("__ handle_start: {}".format(handle_start))
self.log.debug("__ handle_end: {}".format(handle_end))
# get representation and loop them
representations = inst_data["representations"]
ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
# filter out mov and img sequences
representations_new = representations[:]
for repre in representations:
if repre['ext'] not in self.ext_filter:
continue
tags = repre.get("tags", [])
if inst_data.get("multipartExr") is True:
# ffmpeg doesn't support multipart exrs
continue
if "thumbnail" in tags:
continue
self.log.info("Try repre: {}".format(repre))
if "review" not in tags:
continue
staging_dir = repre["stagingDir"]
# iterating preset output profiles
for name, profile in output_profiles.items():
repre_new = repre.copy()
ext = profile.get("ext", None)
p_tags = profile.get('tags', [])
# append repre tags into profile tags
for t in tags:
if t not in p_tags:
p_tags.append(t)
self.log.info("p_tags: `{}`".format(p_tags))
# adding control for presets to be sequence
# or single file
is_sequence = ("sequence" in p_tags) and (ext in (
"png", "jpg", "jpeg"))
# no handles switch from profile tags
no_handles = "no-handles" in p_tags
self.log.debug("Profile name: {}".format(name))
if not ext:
ext = "mov"
self.log.warning(
str("`ext` attribute not in output "
"profile. Setting to default ext: `mov`"))
self.log.debug(
"instance.families: {}".format(
instance.data['families']))
self.log.debug(
"profile.families: {}".format(profile['families']))
profile_family_check = False
for _family in profile['families']:
if _family in instance.data['families']:
profile_family_check = True
break
if not profile_family_check:
continue
if isinstance(repre["files"], list):
collections, remainder = clique.assemble(
repre["files"])
full_input_path = os.path.join(
staging_dir, collections[0].format(
'{head}{padding}{tail}')
)
filename = collections[0].format('{head}')
if filename.endswith('.'):
filename = filename[:-1]
else:
full_input_path = os.path.join(
staging_dir, repre["files"])
filename = repre["files"].split(".")[0]
repr_file = filename + "_{0}.{1}".format(name, ext)
full_output_path = os.path.join(
staging_dir, repr_file)
if is_sequence:
filename_base = filename + "_{0}".format(name)
repr_file = filename_base + ".%08d.{0}".format(
ext)
repre_new["sequence_file"] = repr_file
full_output_path = os.path.join(
staging_dir, filename_base, repr_file)
self.log.info("input {}".format(full_input_path))
self.log.info("output {}".format(full_output_path))
new_tags = [x for x in tags if x != "delete"]
# add families
[instance.data["families"].append(t)
for t in p_tags
if t not in instance.data["families"]]
# add to
[new_tags.append(t) for t in p_tags
if t not in new_tags]
self.log.info("new_tags: `{}`".format(new_tags))
input_args = []
output_args = []
# overrides output file
input_args.append("-y")
# preset's input data
input_args.extend(profile.get('input', []))
# necessary input data
# adds start arg only if image sequence
frame_start_handle = frame_start - handle_start
frame_end_handle = frame_end + handle_end
if isinstance(repre["files"], list):
if frame_start_handle != repre.get(
"detectedStart", frame_start_handle):
frame_start_handle = repre.get("detectedStart")
# exclude handle if no handles defined
if no_handles:
frame_start_handle = frame_start
frame_end_handle = frame_end
input_args.append(
"-start_number {0} -framerate {1}".format(
frame_start_handle, fps))
else:
if no_handles:
start_sec = float(handle_start) / fps
input_args.append("-ss {:0.2f}".format(start_sec))
frame_start_handle = frame_start
frame_end_handle = frame_end
input_args.append("-i {}".format(full_input_path))
for audio in instance.data.get("audio", []):
offset_frames = (
instance.data.get("frameStartFtrack") -
audio["offset"]
)
offset_seconds = offset_frames / fps
if offset_seconds > 0:
input_args.append("-ss")
else:
input_args.append("-itsoffset")
input_args.append(str(abs(offset_seconds)))
input_args.extend(
["-i", audio["filename"]]
)
# Need to merge audio if there are more
# than 1 input.
if len(instance.data["audio"]) > 1:
input_args.extend(
[
"-filter_complex",
"amerge",
"-ac",
"2"
]
)
codec_args = profile.get('codec', [])
output_args.extend(codec_args)
# preset's output data
output_args.extend(profile.get('output', []))
# defining image ratios
resolution_ratio = (
float(resolution_width) * pixel_aspect) / resolution_height
delivery_ratio = float(self.to_width) / float(self.to_height)
self.log.debug(
"__ resolution_ratio: `{}`".format(resolution_ratio))
self.log.debug(
"__ delivery_ratio: `{}`".format(delivery_ratio))
# get scale factor
scale_factor = float(self.to_height) / (
resolution_height * pixel_aspect)
# round both ratios to two decimals for the comparisons below
resolution_ratio_test = float(
"{:0.2f}".format(resolution_ratio))
delivery_ratio_test = float(
"{:0.2f}".format(delivery_ratio))
if resolution_ratio_test != delivery_ratio_test:
scale_factor = float(self.to_width) / (
resolution_width * pixel_aspect)
if int(scale_factor * 100) == 100:
scale_factor = (
float(self.to_height) / resolution_height
)
self.log.debug("__ scale_factor: `{}`".format(scale_factor))
# letter_box
lb = profile.get('letter_box', 0)
if lb != 0:
ffmpeg_width = self.to_width
ffmpeg_height = self.to_height
if "reformat" not in p_tags:
lb /= pixel_aspect
if resolution_ratio_test != delivery_ratio_test:
ffmpeg_width = resolution_width
ffmpeg_height = int(
resolution_height * pixel_aspect)
else:
if resolution_ratio_test != delivery_ratio_test:
lb /= scale_factor
else:
lb /= pixel_aspect
output_args.append(str(
"-filter:v scale={0}x{1}:flags=lanczos,"
"setsar=1,drawbox=0:0:iw:"
"round((ih-(iw*(1/{2})))/2):t=fill:"
"c=black,drawbox=0:ih-round((ih-(iw*("
"1/{2})))/2):iw:round((ih-(iw*(1/{2})))"
"/2):t=fill:c=black").format(
ffmpeg_width, ffmpeg_height, lb))
# In case audio is longer than video.
output_args.append("-shortest")
if no_handles:
duration_sec = float(
frame_end_handle - frame_start_handle + 1) / fps
output_args.append("-t {:0.2f}".format(duration_sec))
# output filename
output_args.append(full_output_path)
self.log.debug(
"__ pixel_aspect: `{}`".format(pixel_aspect))
self.log.debug(
"__ resolution_width: `{}`".format(
resolution_width))
self.log.debug(
"__ resolution_height: `{}`".format(
resolution_height))
# reformat: scale non-square pixels and fit the delivery width
if "reformat" in p_tags:
if resolution_ratio_test < delivery_ratio_test:
self.log.debug("lower then delivery")
width_scale = int(self.to_width * scale_factor)
width_half_pad = int((
self.to_width - width_scale) / 2)
height_scale = self.to_height
height_half_pad = 0
else:
self.log.debug("heigher then delivery")
width_scale = self.to_width
width_half_pad = 0
scale_factor = float(self.to_width) / (float(
resolution_width) * pixel_aspect)
self.log.debug(
"__ scale_factor: `{}`".format(
scale_factor))
height_scale = int(
resolution_height * scale_factor)
height_half_pad = int(
(self.to_height - height_scale) / 2)
self.log.debug(
"__ width_scale: `{}`".format(width_scale))
self.log.debug(
"__ width_half_pad: `{}`".format(
width_half_pad))
self.log.debug(
"__ height_scale: `{}`".format(
height_scale))
self.log.debug(
"__ height_half_pad: `{}`".format(
height_half_pad))
scaling_arg = str(
"scale={0}x{1}:flags=lanczos,"
"pad={2}:{3}:{4}:{5}:black,setsar=1"
).format(width_scale, height_scale,
self.to_width, self.to_height,
width_half_pad,
height_half_pad
)
vf_back = self.add_video_filter_args(
output_args, scaling_arg)
# add it to output_args
output_args.insert(0, vf_back)
# baking lut file application
lut_path = instance.data.get("lutPath")
if lut_path and ("bake-lut" in p_tags):
# remove gamma info as it is all baked into the LUT
gamma = next((g for g in input_args
if "-gamma" in g), None)
if gamma:
input_args.remove(gamma)
# create lut argument
lut_arg = "lut3d=file='{}'".format(
lut_path.replace(
"\\", "/").replace(":/", "\\:/")
)
lut_arg += ",colormatrix=bt601:bt709"
vf_back = self.add_video_filter_args(
output_args, lut_arg)
# add it to output_args
output_args.insert(0, vf_back)
self.log.info("Added Lut to ffmpeg command")
self.log.debug(
"_ output_args: `{}`".format(output_args))
if is_sequence:
stg_dir = os.path.dirname(full_output_path)
if not os.path.exists(stg_dir):
self.log.debug(
"creating dir: {}".format(stg_dir))
os.mkdir(stg_dir)
mov_args = [
"\"{}\"".format(ffmpeg_path),
" ".join(input_args),
" ".join(output_args)
]
subprcs_cmd = " ".join(mov_args)
# run subprocess
self.log.debug("Executing: {}".format(subprcs_cmd))
openpype.api.run_subprocess(
subprcs_cmd, shell=True, logger=self.log
)
# create representation data
repre_new.update({
'name': name,
'ext': ext,
'files': repr_file,
"tags": new_tags,
"outputName": name,
"codec": codec_args,
"_profile": profile,
"resolutionHeight": resolution_height,
"resolutionWidth": resolution_width,
"frameStartFtrack": frame_start_handle,
"frameEndFtrack": frame_end_handle
})
if is_sequence:
repre_new.update({
"stagingDir": stg_dir,
"files": os.listdir(stg_dir)
})
if no_handles:
repre_new.update({
"outputName": name + "_noHandles",
"frameStartFtrack": frame_start,
"frameEndFtrack": frame_end
})
if repre_new.get('preview'):
repre_new.pop("preview")
if repre_new.get('thumbnail'):
repre_new.pop("thumbnail")
# adding representation
self.log.debug("Adding: {}".format(repre_new))
representations_new.append(repre_new)
# iterate over a copy so removing items does not skip entries
for repre in list(representations_new):
    if "delete" in repre.get("tags", []):
        representations_new.remove(repre)
        continue
    if "clean_name" in repre.get("tags", []):
        repre.pop("outputName", None)
instance.data.update({
"reviewToWidth": self.to_width,
"reviewToHeight": self.to_height
})
self.log.debug(
"new representations: {}".format(representations_new))
instance.data["representations"] = representations_new
self.log.debug("Families Out: `{}`".format(instance.data["families"]))
def add_video_filter_args(self, args, inserting_arg):
"""
Fix video filter arguments into one long string

View file

@ -45,7 +45,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin):
stagingdir = os.path.normpath(repre.get("stagingDir"))
oiio_tool_path = os.getenv("OPENPYPE_OIIO_PATH", "")
oiio_tool_path = openpype.lib.get_oiio_tools_path()
if not os.path.exists(oiio_tool_path):
self.log.error(
"OIIO tool not found in {}".format(oiio_tool_path))

View file

@ -93,7 +93,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"harmony.palette",
"editorial",
"background",
"camerarig"
"camerarig",
"redshiftproxy"
]
exclude_families = ["clip"]
db_representation_context_keys = [

View file

@ -9,6 +9,11 @@
"enabled": true,
"optional": true,
"active": true
},
"ValidateAssetName": {
"enabled": true,
"optional": true,
"active": true
}
},
"filters": {}

View file

@ -20,6 +20,7 @@
},
"variants": {
"2022": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2022\\bin\\maya.exe"
@ -39,6 +40,7 @@
}
},
"2020": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2020\\bin\\maya.exe"
@ -58,6 +60,7 @@
}
},
"2019": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2019\\bin\\maya.exe"
@ -77,6 +80,7 @@
}
},
"2018": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2018\\bin\\maya.exe"
@ -118,6 +122,7 @@
},
"variants": {
"13-0": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe"
@ -135,6 +140,7 @@
"environment": {}
},
"12-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe"
@ -152,6 +158,7 @@
"environment": {}
},
"12-0": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe"
@ -169,6 +176,7 @@
"environment": {}
},
"11-3": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe"
@ -186,6 +194,7 @@
"environment": {}
},
"11-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe"
@ -227,6 +236,7 @@
},
"variants": {
"13-0": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe"
@ -250,6 +260,7 @@
"environment": {}
},
"12-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe"
@ -273,6 +284,7 @@
"environment": {}
},
"12-0": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe"
@ -296,6 +308,7 @@
"environment": {}
},
"11-3": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe"
@ -319,6 +332,7 @@
"environment": {}
},
"11-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe"
@ -366,6 +380,7 @@
},
"variants": {
"13-0": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe"
@ -389,6 +404,7 @@
"environment": {}
},
"12-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe"
@ -412,6 +428,7 @@
"environment": {}
},
"12-0": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe"
@ -435,6 +452,7 @@
"environment": {}
},
"11-3": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe"
@ -458,6 +476,7 @@
"environment": {}
},
"11-2": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -503,6 +522,7 @@
},
"variants": {
"13-0": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe"
@ -526,6 +546,7 @@
"environment": {}
},
"12-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe"
@ -549,6 +570,7 @@
"environment": {}
},
"12-0": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe"
@ -572,6 +594,7 @@
"environment": {}
},
"11-3": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe"
@ -595,6 +618,7 @@
"environment": {}
},
"11-2": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe"
@ -657,6 +681,7 @@
"16": {
"enabled": true,
"variant_label": "16",
"use_python_2": false,
"executables": {
"windows": [],
"darwin": [],
@ -672,6 +697,7 @@
"9": {
"enabled": true,
"variant_label": "9",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Blackmagic Design\\Fusion 9\\Fusion.exe"
@ -735,6 +761,7 @@
"16": {
"enabled": true,
"variant_label": "16",
"use_python_2": false,
"executables": {
"windows": [
"C:/Program Files/Blackmagic Design/DaVinci Resolve/Resolve.exe"
@ -770,6 +797,7 @@
},
"variants": {
"18-5": {
"use_python_2": true,
"executables": {
"windows": [
"C:\\Program Files\\Side Effects Software\\Houdini 18.5.499\\bin\\houdini.exe"
@ -785,6 +813,7 @@
"environment": {}
},
"18": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -798,6 +827,7 @@
"environment": {}
},
"17": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -832,6 +862,7 @@
},
"variants": {
"2-83": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe"
@ -853,6 +884,7 @@
"environment": {}
},
"2-90": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe"
@ -874,6 +906,7 @@
"environment": {}
},
"2-91": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe"
@ -914,6 +947,7 @@
"20": {
"enabled": true,
"variant_label": "20",
"use_python_2": false,
"executables": {
"windows": [],
"darwin": [],
@ -929,6 +963,7 @@
"17": {
"enabled": true,
"variant_label": "17",
"use_python_2": false,
"executables": {
"windows": [],
"darwin": [
@ -955,6 +990,7 @@
},
"variants": {
"animation_11-64bits": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe"
@ -970,6 +1006,7 @@
"environment": {}
},
"animation_11-32bits": {
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe"
@ -1005,6 +1042,7 @@
"2020": {
"enabled": true,
"variant_label": "2020",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe"
@ -1022,6 +1060,7 @@
"2021": {
"enabled": true,
"variant_label": "2021",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe"
@ -1053,6 +1092,7 @@
"2020": {
"enabled": true,
"variant_label": "2020",
"use_python_2": false,
"executables": {
"windows": [
""
@ -1070,6 +1110,7 @@
"2021": {
"enabled": true,
"variant_label": "2021",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe"
@ -1098,6 +1139,7 @@
"local": {
"enabled": true,
"variant_label": "Local",
"use_python_2": false,
"executables": {
"windows": [],
"darwin": [],
@ -1119,11 +1161,10 @@
"host_name": "unreal",
"environment": {
"AVALON_UNREAL_PLUGIN": "{OPENPYPE_REPOS_ROOT}/repos/avalon-unreal-integration",
"OPENPYPE_LOG_NO_COLORS": "True",
"QT_PREFERRED_BINDING": "PySide"
"OPENPYPE_LOG_NO_COLORS": "True"
},
"variants": {
"4-24": {
"4-26": {
"executables": {
"windows": [],
"darwin": [],
@ -1143,6 +1184,7 @@
"environment": {},
"variants": {
"python_3-7": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -1156,6 +1198,7 @@
"environment": {}
},
"python_2-7": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -1169,6 +1212,7 @@
"environment": {}
},
"terminal": {
"use_python_2": true,
"executables": {
"windows": [],
"darwin": [],
@ -1195,6 +1239,7 @@
"environment": {},
"variants": {
"1-1": {
"use_python_2": false,
"executables": {
"windows": [],
"darwin": [],

View file

@ -2,15 +2,9 @@
"studio_name": "Studio name",
"studio_code": "stu",
"environment": {
"FFMPEG_PATH": {
"windows": "{OPENPYPE_REPOS_ROOT}/vendor/bin/ffmpeg_exec/windows/bin",
"darwin": "{OPENPYPE_REPOS_ROOT}/vendor/bin/ffmpeg_exec/darwin/bin",
"linux": ":{OPENPYPE_REPOS_ROOT}/vendor/bin/ffmpeg_exec/linux"
},
"OPENPYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs",
"__environment_keys__": {
"global": [
"FFMPEG_PATH",
"OPENPYPE_OCIO_CONFIG"
]
}

View file

@ -376,7 +376,10 @@ class BoolEntity(InputEntity):
def _item_initalization(self):
self.valid_value_types = (bool, )
self.value_on_not_set = True
value_on_not_set = self.convert_to_valid_type(
self.schema_data.get("default", True)
)
self.value_on_not_set = value_on_not_set
class TextEntity(InputEntity):

View file

@ -33,6 +33,17 @@
"docstring": "Validate MarkIn/Out match Frame start/end on shot data"
}
]
},
{
"type": "schema_template",
"name": "template_publish_plugin",
"template_data": [
{
"key": "ValidateAssetName",
"label": "ValidateAssetName",
"docstring": "Validate if shot on instances metadata is same as workfiles shot"
}
]
}
]
},

View file

@ -1,4 +1,10 @@
[
{
"type": "boolean",
"key": "use_python_2",
"label": "Use Python 2",
"default": false
},
{
"type": "path",
"key": "executables",

View file

@ -262,7 +262,10 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
self.lines[line] = [item]
line += 1
else:
self.lines[line].append(item)
if line in self.lines:
self.lines[line].append(item)
else:
self.lines[line] = [item]
left_x = left_x + width + self.item_spacing
self.update()

View file

@ -0,0 +1,56 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""dnspython DNS toolkit"""
__all__ = [
'dnssec',
'e164',
'edns',
'entropy',
'exception',
'flags',
'hash',
'inet',
'ipv4',
'ipv6',
'message',
'name',
'namedict',
'node',
'opcode',
'query',
'rcode',
'rdata',
'rdataclass',
'rdataset',
'rdatatype',
'renderer',
'resolver',
'reversename',
'rrset',
'set',
'tokenizer',
'tsig',
'tsigkeyring',
'ttl',
'rdtypes',
'update',
'version',
'wiredata',
'zone',
]

View file

@ -0,0 +1,59 @@
import sys
import decimal
from decimal import Context
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
long = int
xrange = range
else:
long = long # pylint: disable=long-builtin
xrange = xrange # pylint: disable=xrange-builtin
# unicode / binary types
if PY3:
text_type = str
binary_type = bytes
string_types = (str,)
unichr = chr
def maybe_decode(x):
return x.decode()
def maybe_encode(x):
return x.encode()
def maybe_chr(x):
return x
def maybe_ord(x):
return x
else:
text_type = unicode # pylint: disable=unicode-builtin, undefined-variable
binary_type = str
string_types = (
basestring, # pylint: disable=basestring-builtin, undefined-variable
)
unichr = unichr # pylint: disable=unichr-builtin
def maybe_decode(x):
return x
def maybe_encode(x):
return x
def maybe_chr(x):
return chr(x)
def maybe_ord(x):
return ord(x)
def round_py2_compat(what):
"""
Python 2 and Python 3 use different rounding strategies in round(). This
function ensures that results are python2/3 compatible and backward
compatible with previous py2 releases
:param what: float
:return: rounded long
"""
d = Context(
prec=len(str(long(what))), # round to integer with max precision
rounding=decimal.ROUND_HALF_UP
).create_decimal(str(what)) # str(): python 2.6 compat
return long(d)
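# Minimal doctest-style sketch of why this helper exists (values illustrative,
# results shown as they appear on Python 3, where the built-in round() uses
# banker's rounding and round(2.5) returns 2):
#
#   >>> round_py2_compat(2.5)
#   3
#   >>> round_py2_compat(1.5)
#   2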

View file

@ -0,0 +1,519 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Common DNSSEC-related functions and constants."""
from io import BytesIO
import struct
import time
import dns.exception
import dns.name
import dns.node
import dns.rdataset
import dns.rdata
import dns.rdatatype
import dns.rdataclass
from ._compat import string_types
class UnsupportedAlgorithm(dns.exception.DNSException):
"""The DNSSEC algorithm is not supported."""
class ValidationFailure(dns.exception.DNSException):
"""The DNSSEC signature is invalid."""
#: RSAMD5
RSAMD5 = 1
#: DH
DH = 2
#: DSA
DSA = 3
#: ECC
ECC = 4
#: RSASHA1
RSASHA1 = 5
#: DSANSEC3SHA1
DSANSEC3SHA1 = 6
#: RSASHA1NSEC3SHA1
RSASHA1NSEC3SHA1 = 7
#: RSASHA256
RSASHA256 = 8
#: RSASHA512
RSASHA512 = 10
#: ECDSAP256SHA256
ECDSAP256SHA256 = 13
#: ECDSAP384SHA384
ECDSAP384SHA384 = 14
#: INDIRECT
INDIRECT = 252
#: PRIVATEDNS
PRIVATEDNS = 253
#: PRIVATEOID
PRIVATEOID = 254
_algorithm_by_text = {
'RSAMD5': RSAMD5,
'DH': DH,
'DSA': DSA,
'ECC': ECC,
'RSASHA1': RSASHA1,
'DSANSEC3SHA1': DSANSEC3SHA1,
'RSASHA1NSEC3SHA1': RSASHA1NSEC3SHA1,
'RSASHA256': RSASHA256,
'RSASHA512': RSASHA512,
'INDIRECT': INDIRECT,
'ECDSAP256SHA256': ECDSAP256SHA256,
'ECDSAP384SHA384': ECDSAP384SHA384,
'PRIVATEDNS': PRIVATEDNS,
'PRIVATEOID': PRIVATEOID,
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.
_algorithm_by_value = {y: x for x, y in _algorithm_by_text.items()}
def algorithm_from_text(text):
"""Convert text into a DNSSEC algorithm value.
Returns an ``int``.
"""
value = _algorithm_by_text.get(text.upper())
if value is None:
value = int(text)
return value
def algorithm_to_text(value):
"""Convert a DNSSEC algorithm value to text
Returns a ``str``.
"""
text = _algorithm_by_value.get(value)
if text is None:
text = str(value)
return text
def _to_rdata(record, origin):
s = BytesIO()
record.to_wire(s, origin=origin)
return s.getvalue()
def key_id(key, origin=None):
"""Return the key id (a 16-bit number) for the specified key.
Note the *origin* parameter of this function is historical and
is not needed.
Returns an ``int`` between 0 and 65535.
"""
rdata = _to_rdata(key, origin)
rdata = bytearray(rdata)
if key.algorithm == RSAMD5:
return (rdata[-3] << 8) + rdata[-2]
else:
total = 0
for i in range(len(rdata) // 2):
total += (rdata[2 * i] << 8) + \
rdata[2 * i + 1]
if len(rdata) % 2 != 0:
total += rdata[len(rdata) - 1] << 8
total += ((total >> 16) & 0xffff)
return total & 0xffff
def make_ds(name, key, algorithm, origin=None):
"""Create a DS record for a DNSSEC key.
*name* is the owner name of the DS record.
*key* is a ``dns.rdtypes.ANY.DNSKEY``.
*algorithm* is a string describing which hash algorithm to use. The
currently supported hashes are "SHA1" and "SHA256". Case does not
matter for these strings.
*origin* is a ``dns.name.Name`` and will be used as the origin
if *key* is a relative name.
Returns a ``dns.rdtypes.ANY.DS``.
"""
if algorithm.upper() == 'SHA1':
dsalg = 1
hash = SHA1.new()
elif algorithm.upper() == 'SHA256':
dsalg = 2
hash = SHA256.new()
else:
raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm)
if isinstance(name, string_types):
name = dns.name.from_text(name, origin)
hash.update(name.canonicalize().to_wire())
hash.update(_to_rdata(key, origin))
digest = hash.digest()
dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest
return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0,
len(dsrdata))
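# Minimal usage sketch (the DNSKEY text below is hypothetical and truncated;
# any real key in zone-file format works the same way):
#
#   >>> import dns.rdata, dns.rdataclass, dns.rdatatype, dns.dnssec
#   >>> dnskey = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DNSKEY,
#   ...                              '257 3 8 AwEAAa...')
#   >>> dns.dnssec.key_id(dnskey)                 # 16-bit key tag, 0..65535
#   >>> dns.dnssec.make_ds('example.', dnskey, 'SHA256')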
def _find_candidate_keys(keys, rrsig):
candidate_keys = []
value = keys.get(rrsig.signer)
if value is None:
return None
if isinstance(value, dns.node.Node):
try:
rdataset = value.find_rdataset(dns.rdataclass.IN,
dns.rdatatype.DNSKEY)
except KeyError:
return None
else:
rdataset = value
for rdata in rdataset:
if rdata.algorithm == rrsig.algorithm and \
key_id(rdata) == rrsig.key_tag:
candidate_keys.append(rdata)
return candidate_keys
def _is_rsa(algorithm):
return algorithm in (RSAMD5, RSASHA1,
RSASHA1NSEC3SHA1, RSASHA256,
RSASHA512)
def _is_dsa(algorithm):
return algorithm in (DSA, DSANSEC3SHA1)
def _is_ecdsa(algorithm):
return _have_ecdsa and (algorithm in (ECDSAP256SHA256, ECDSAP384SHA384))
def _is_md5(algorithm):
return algorithm == RSAMD5
def _is_sha1(algorithm):
return algorithm in (DSA, RSASHA1,
DSANSEC3SHA1, RSASHA1NSEC3SHA1)
def _is_sha256(algorithm):
return algorithm in (RSASHA256, ECDSAP256SHA256)
def _is_sha384(algorithm):
return algorithm == ECDSAP384SHA384
def _is_sha512(algorithm):
return algorithm == RSASHA512
def _make_hash(algorithm):
if _is_md5(algorithm):
return MD5.new()
if _is_sha1(algorithm):
return SHA1.new()
if _is_sha256(algorithm):
return SHA256.new()
if _is_sha384(algorithm):
return SHA384.new()
if _is_sha512(algorithm):
return SHA512.new()
raise ValidationFailure('unknown hash for algorithm %u' % algorithm)
def _make_algorithm_id(algorithm):
if _is_md5(algorithm):
oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05]
elif _is_sha1(algorithm):
oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a]
elif _is_sha256(algorithm):
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01]
elif _is_sha512(algorithm):
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03]
else:
raise ValidationFailure('unknown algorithm %u' % algorithm)
olen = len(oid)
dlen = _make_hash(algorithm).digest_size
idbytes = [0x30] + [8 + olen + dlen] + \
[0x30, olen + 4] + [0x06, olen] + oid + \
[0x05, 0x00] + [0x04, dlen]
return struct.pack('!%dB' % len(idbytes), *idbytes)
def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
"""Validate an RRset against a single signature rdata
The owner name of *rrsig* is assumed to be the same as the owner name
of *rrset*.
*rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or
a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple.
*rrsig* is a ``dns.rdata.Rdata``, the signature to validate.
*keys* is the key dictionary, used to find the DNSKEY associated with
a given name. The dictionary is keyed by a ``dns.name.Name``, and has
``dns.node.Node`` or ``dns.rdataset.Rdataset`` values.
*origin* is a ``dns.name.Name``, the origin to use for relative names.
*now* is an ``int``, the time to use when validating the signatures,
in seconds since the UNIX epoch. The default is the current time.
"""
if isinstance(origin, string_types):
origin = dns.name.from_text(origin, dns.name.root)
candidate_keys = _find_candidate_keys(keys, rrsig)
if candidate_keys is None:
raise ValidationFailure('unknown key')
for candidate_key in candidate_keys:
# For convenience, allow the rrset to be specified as a (name,
# rdataset) tuple as well as a proper rrset
if isinstance(rrset, tuple):
rrname = rrset[0]
rdataset = rrset[1]
else:
rrname = rrset.name
rdataset = rrset
if now is None:
now = time.time()
if rrsig.expiration < now:
raise ValidationFailure('expired')
if rrsig.inception > now:
raise ValidationFailure('not yet valid')
hash = _make_hash(rrsig.algorithm)
if _is_rsa(rrsig.algorithm):
keyptr = candidate_key.key
(bytes_,) = struct.unpack('!B', keyptr[0:1])
keyptr = keyptr[1:]
if bytes_ == 0:
(bytes_,) = struct.unpack('!H', keyptr[0:2])
keyptr = keyptr[2:]
rsa_e = keyptr[0:bytes_]
rsa_n = keyptr[bytes_:]
try:
pubkey = CryptoRSA.construct(
(number.bytes_to_long(rsa_n),
number.bytes_to_long(rsa_e)))
except ValueError:
raise ValidationFailure('invalid public key')
sig = rrsig.signature
elif _is_dsa(rrsig.algorithm):
keyptr = candidate_key.key
(t,) = struct.unpack('!B', keyptr[0:1])
keyptr = keyptr[1:]
octets = 64 + t * 8
dsa_q = keyptr[0:20]
keyptr = keyptr[20:]
dsa_p = keyptr[0:octets]
keyptr = keyptr[octets:]
dsa_g = keyptr[0:octets]
keyptr = keyptr[octets:]
dsa_y = keyptr[0:octets]
pubkey = CryptoDSA.construct(
(number.bytes_to_long(dsa_y),
number.bytes_to_long(dsa_g),
number.bytes_to_long(dsa_p),
number.bytes_to_long(dsa_q)))
sig = rrsig.signature[1:]
elif _is_ecdsa(rrsig.algorithm):
# use ecdsa for NIST-384p -- not currently supported by pycryptodome
keyptr = candidate_key.key
if rrsig.algorithm == ECDSAP256SHA256:
curve = ecdsa.curves.NIST256p
key_len = 32
elif rrsig.algorithm == ECDSAP384SHA384:
curve = ecdsa.curves.NIST384p
key_len = 48
x = number.bytes_to_long(keyptr[0:key_len])
y = number.bytes_to_long(keyptr[key_len:key_len * 2])
if not ecdsa.ecdsa.point_is_valid(curve.generator, x, y):
raise ValidationFailure('invalid ECDSA key')
point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order)
verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point,
curve)
pubkey = ECKeyWrapper(verifying_key, key_len)
r = rrsig.signature[:key_len]
s = rrsig.signature[key_len:]
sig = ecdsa.ecdsa.Signature(number.bytes_to_long(r),
number.bytes_to_long(s))
else:
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
hash.update(_to_rdata(rrsig, origin)[:18])
hash.update(rrsig.signer.to_digestable(origin))
if rrsig.labels < len(rrname) - 1:
suffix = rrname.split(rrsig.labels + 1)[1]
rrname = dns.name.from_text('*', suffix)
rrnamebuf = rrname.to_digestable(origin)
rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass,
rrsig.original_ttl)
rrlist = sorted(rdataset)
for rr in rrlist:
hash.update(rrnamebuf)
hash.update(rrfixed)
rrdata = rr.to_digestable(origin)
rrlen = struct.pack('!H', len(rrdata))
hash.update(rrlen)
hash.update(rrdata)
try:
if _is_rsa(rrsig.algorithm):
verifier = pkcs1_15.new(pubkey)
# will raise ValueError if verify fails:
verifier.verify(hash, sig)
elif _is_dsa(rrsig.algorithm):
verifier = DSS.new(pubkey, 'fips-186-3')
verifier.verify(hash, sig)
elif _is_ecdsa(rrsig.algorithm):
digest = hash.digest()
if not pubkey.verify(digest, sig):
raise ValueError
else:
# Raise here for code clarity; this won't actually ever happen
# since if the algorithm is really unknown we'd already have
# raised an exception above
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
# If we got here, we successfully verified so we can return without error
return
except ValueError:
# this happens on an individual validation failure
continue
# nothing verified -- raise failure:
raise ValidationFailure('verify failure')
def _validate(rrset, rrsigset, keys, origin=None, now=None):
"""Validate an RRset.
*rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or
a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple.
*rrsigset* is the signature RRset to be validated. It can be a
``dns.rrset.RRset`` or a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple.
*keys* is the key dictionary, used to find the DNSKEY associated with
a given name. The dictionary is keyed by a ``dns.name.Name``, and has
``dns.node.Node`` or ``dns.rdataset.Rdataset`` values.
*origin* is a ``dns.name.Name``, the origin to use for relative names.
*now* is an ``int``, the time to use when validating the signatures,
in seconds since the UNIX epoch. The default is the current time.
"""
if isinstance(origin, string_types):
origin = dns.name.from_text(origin, dns.name.root)
if isinstance(rrset, tuple):
rrname = rrset[0]
else:
rrname = rrset.name
if isinstance(rrsigset, tuple):
rrsigname = rrsigset[0]
rrsigrdataset = rrsigset[1]
else:
rrsigname = rrsigset.name
rrsigrdataset = rrsigset
rrname = rrname.choose_relativity(origin)
rrsigname = rrsigname.choose_relativity(origin)
if rrname != rrsigname:
raise ValidationFailure("owner names do not match")
for rrsig in rrsigrdataset:
try:
_validate_rrsig(rrset, rrsig, keys, origin, now)
return
except ValidationFailure:
pass
raise ValidationFailure("no RRSIGs validated")
def _need_pycrypto(*args, **kwargs):
raise NotImplementedError("DNSSEC validation requires pycryptodome/pycryptodomex")
try:
try:
# test we're using pycryptodome, not pycrypto (which misses SHA1 for example)
from Crypto.Hash import MD5, SHA1, SHA256, SHA384, SHA512
from Crypto.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA
from Crypto.Signature import pkcs1_15, DSS
from Crypto.Util import number
except ImportError:
from Cryptodome.Hash import MD5, SHA1, SHA256, SHA384, SHA512
from Cryptodome.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA
from Cryptodome.Signature import pkcs1_15, DSS
from Cryptodome.Util import number
except ImportError:
validate = _need_pycrypto
validate_rrsig = _need_pycrypto
_have_pycrypto = False
_have_ecdsa = False
else:
validate = _validate
validate_rrsig = _validate_rrsig
_have_pycrypto = True
try:
import ecdsa
import ecdsa.ecdsa
import ecdsa.ellipticcurve
import ecdsa.keys
except ImportError:
_have_ecdsa = False
else:
_have_ecdsa = True
class ECKeyWrapper(object):
def __init__(self, key, key_len):
self.key = key
self.key_len = key_len
def verify(self, digest, sig):
diglong = number.bytes_to_long(digest)
return self.key.pubkey.verifies(diglong, sig)

View file

@ -0,0 +1,105 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2006-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS E.164 helpers."""
import dns.exception
import dns.name
import dns.resolver
from ._compat import string_types, maybe_decode
#: The public E.164 domain.
public_enum_domain = dns.name.from_text('e164.arpa.')
def from_e164(text, origin=public_enum_domain):
"""Convert an E.164 number in textual form into a Name object whose
value is the ENUM domain name for that number.
Non-digits in the text are ignored, i.e. "16505551212",
"+1.650.555.1212" and "1 (650) 555-1212" are all the same.
*text*, a ``text``, is an E.164 number in textual form.
*origin*, a ``dns.name.Name``, the domain in which the number
should be constructed. The default is ``e164.arpa.``.
Returns a ``dns.name.Name``.
"""
parts = [d for d in text if d.isdigit()]
parts.reverse()
return dns.name.from_text('.'.join(parts), origin=origin)
def to_e164(name, origin=public_enum_domain, want_plus_prefix=True):
"""Convert an ENUM domain name into an E.164 number.
Note that dnspython does not have any information about preferred
number formats within national numbering plans, so all numbers are
emitted as a simple string of digits, prefixed by a '+' (unless
*want_plus_prefix* is ``False``).
*name* is a ``dns.name.Name``, the ENUM domain name.
*origin* is a ``dns.name.Name``, a domain containing the ENUM
domain name. The name is relativized to this domain before being
converted to text. If ``None``, no relativization is done.
*want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of
the returned number.
Returns a ``text``.
"""
if origin is not None:
name = name.relativize(origin)
dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1]
if len(dlabels) != len(name.labels):
raise dns.exception.SyntaxError('non-digit labels in ENUM domain name')
dlabels.reverse()
text = b''.join(dlabels)
if want_plus_prefix:
text = b'+' + text
return maybe_decode(text)
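# Round-trip sketch (doctest-style, digits chosen arbitrarily):
#
#   >>> n = from_e164('+1.650.555.1212')
#   >>> str(n)
#   '2.1.2.1.5.5.5.0.5.6.1.e164.arpa.'
#   >>> to_e164(n)
#   '+16505551212'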
def query(number, domains, resolver=None):
"""Look for NAPTR RRs for the specified number in the specified domains.
e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
*number*, a ``text`` is the number to look for.
*domains* is an iterable containing ``dns.name.Name`` values.
*resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If
``None``, the default resolver is used.
"""
if resolver is None:
resolver = dns.resolver.get_default_resolver()
e_nx = dns.resolver.NXDOMAIN()
for domain in domains:
if isinstance(domain, string_types):
domain = dns.name.from_text(domain)
qname = dns.e164.from_e164(number, domain)
try:
return resolver.query(qname, 'NAPTR')
except dns.resolver.NXDOMAIN as e:
e_nx += e
raise e_nx

View file

@ -0,0 +1,269 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2009-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""EDNS Options"""
from __future__ import absolute_import
import math
import struct
import dns.inet
#: NSID
NSID = 3
#: DAU
DAU = 5
#: DHU
DHU = 6
#: N3U
N3U = 7
#: ECS (client-subnet)
ECS = 8
#: EXPIRE
EXPIRE = 9
#: COOKIE
COOKIE = 10
#: KEEPALIVE
KEEPALIVE = 11
#: PADDING
PADDING = 12
#: CHAIN
CHAIN = 13
class Option(object):
"""Base class for all EDNS option types."""
def __init__(self, otype):
"""Initialize an option.
*otype*, an ``int``, is the option type.
"""
self.otype = otype
def to_wire(self, file):
"""Convert an option to wire format.
"""
raise NotImplementedError
@classmethod
def from_wire(cls, otype, wire, current, olen):
"""Build an EDNS option object from wire format.
*otype*, an ``int``, is the option type.
*wire*, a ``binary``, is the wire-format message.
*current*, an ``int``, is the offset in *wire* of the beginning
of the rdata.
*olen*, an ``int``, is the length of the wire-format option data
Returns a ``dns.edns.Option``.
"""
raise NotImplementedError
def _cmp(self, other):
"""Compare an EDNS option with another option of the same type.
Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*.
"""
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, Option):
return False
if self.otype != other.otype:
return False
return self._cmp(other) == 0
def __ne__(self, other):
if not isinstance(other, Option):
return False
if self.otype != other.otype:
return False
return self._cmp(other) != 0
def __lt__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) < 0
def __le__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) <= 0
def __ge__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) >= 0
def __gt__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) > 0
class GenericOption(Option):
"""Generic Option Class
This class is used for EDNS option types for which we have no better
implementation.
"""
def __init__(self, otype, data):
super(GenericOption, self).__init__(otype)
self.data = data
def to_wire(self, file):
file.write(self.data)
def to_text(self):
return "Generic %d" % self.otype
@classmethod
def from_wire(cls, otype, wire, current, olen):
return cls(otype, wire[current: current + olen])
def _cmp(self, other):
if self.data == other.data:
return 0
if self.data > other.data:
return 1
return -1
class ECSOption(Option):
"""EDNS Client Subnet (ECS, RFC7871)"""
def __init__(self, address, srclen=None, scopelen=0):
"""*address*, a ``text``, is the client address information.
*srclen*, an ``int``, the source prefix length, which is the
leftmost number of bits of the address to be used for the
lookup. The default is 24 for IPv4 and 56 for IPv6.
*scopelen*, an ``int``, the scope prefix length. This value
must be 0 in queries, and should be set in responses.
"""
super(ECSOption, self).__init__(ECS)
af = dns.inet.af_for_address(address)
if af == dns.inet.AF_INET6:
self.family = 2
if srclen is None:
srclen = 56
elif af == dns.inet.AF_INET:
self.family = 1
if srclen is None:
srclen = 24
else:
raise ValueError('Bad ip family')
self.address = address
self.srclen = srclen
self.scopelen = scopelen
addrdata = dns.inet.inet_pton(af, address)
nbytes = int(math.ceil(srclen/8.0))
# Truncate to srclen and pad to the end of the last octet needed
# See RFC section 6
self.addrdata = addrdata[:nbytes]
nbits = srclen % 8
if nbits != 0:
last = struct.pack('B', ord(self.addrdata[-1:]) & (0xff << nbits))
self.addrdata = self.addrdata[:-1] + last
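# Truncation sketch (illustrative address): with a /24 source prefix only the
# first three octets survive, as required by RFC 7871 section 6:
#
#   >>> opt = ECSOption('192.0.2.1', srclen=24)
#   >>> opt.addrdata
#   b'\xc0\x00\x02'
#   >>> opt.to_text()
#   'ECS 192.0.2.1/24 scope/0'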
def to_text(self):
return "ECS {}/{} scope/{}".format(self.address, self.srclen,
self.scopelen)
def to_wire(self, file):
file.write(struct.pack('!H', self.family))
file.write(struct.pack('!BB', self.srclen, self.scopelen))
file.write(self.addrdata)
@classmethod
def from_wire(cls, otype, wire, cur, olen):
family, src, scope = struct.unpack('!HBB', wire[cur:cur+4])
cur += 4
addrlen = int(math.ceil(src/8.0))
if family == 1:
af = dns.inet.AF_INET
pad = 4 - addrlen
elif family == 2:
af = dns.inet.AF_INET6
pad = 16 - addrlen
else:
raise ValueError('unsupported family')
addr = dns.inet.inet_ntop(af, wire[cur:cur+addrlen] + b'\x00' * pad)
return cls(addr, src, scope)
def _cmp(self, other):
if self.addrdata == other.addrdata:
return 0
if self.addrdata > other.addrdata:
return 1
return -1
_type_to_class = {
ECS: ECSOption
}
def get_option_class(otype):
"""Return the class for the specified option type.
The GenericOption class is used if a more specific class is not
known.
"""
cls = _type_to_class.get(otype)
if cls is None:
cls = GenericOption
return cls
def option_from_wire(otype, wire, current, olen):
"""Build an EDNS option object from wire format.
*otype*, an ``int``, is the option type.
*wire*, a ``binary``, is the wire-format message.
*current*, an ``int``, is the offset in *wire* of the beginning
of the rdata.
*olen*, an ``int``, is the length of the wire-format option data
Returns an instance of a subclass of ``dns.edns.Option``.
"""
cls = get_option_class(otype)
return cls.from_wire(otype, wire, current, olen)
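# Wire-format sketch (hand-built ECS option bytes, illustrative only):
#
#   >>> import struct
#   >>> wire = struct.pack('!HBB', 1, 24, 0) + b'\xc0\x00\x02'   # IPv4, /24, scope 0
#   >>> option_from_wire(ECS, wire, 0, len(wire)).to_text()
#   'ECS 192.0.2.0/24 scope/0'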

View file

@ -0,0 +1,148 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2009-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import os
import random
import time
from ._compat import long, binary_type
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
class EntropyPool(object):
# This is an entropy pool for Python implementations that do not
# have a working SystemRandom. I'm not sure there are any, but
# leaving this code doesn't hurt anything as the library code
# is used if present.
def __init__(self, seed=None):
self.pool_index = 0
self.digest = None
self.next_byte = 0
self.lock = _threading.Lock()
try:
import hashlib
self.hash = hashlib.sha1()
self.hash_len = 20
except ImportError:
try:
import sha
self.hash = sha.new()
self.hash_len = 20
except ImportError:
import md5 # pylint: disable=import-error
self.hash = md5.new()
self.hash_len = 16
self.pool = bytearray(b'\0' * self.hash_len)
if seed is not None:
self.stir(bytearray(seed))
self.seeded = True
self.seed_pid = os.getpid()
else:
self.seeded = False
self.seed_pid = 0
def stir(self, entropy, already_locked=False):
if not already_locked:
self.lock.acquire()
try:
for c in entropy:
if self.pool_index == self.hash_len:
self.pool_index = 0
b = c & 0xff
self.pool[self.pool_index] ^= b
self.pool_index += 1
finally:
if not already_locked:
self.lock.release()
def _maybe_seed(self):
if not self.seeded or self.seed_pid != os.getpid():
try:
seed = os.urandom(16)
except Exception:
try:
r = open('/dev/urandom', 'rb', 0)
try:
seed = r.read(16)
finally:
r.close()
except Exception:
seed = str(time.time())
self.seeded = True
self.seed_pid = os.getpid()
self.digest = None
seed = bytearray(seed)
self.stir(seed, True)
def random_8(self):
self.lock.acquire()
try:
self._maybe_seed()
if self.digest is None or self.next_byte == self.hash_len:
self.hash.update(binary_type(self.pool))
self.digest = bytearray(self.hash.digest())
self.stir(self.digest, True)
self.next_byte = 0
value = self.digest[self.next_byte]
self.next_byte += 1
finally:
self.lock.release()
return value
def random_16(self):
return self.random_8() * 256 + self.random_8()
def random_32(self):
return self.random_16() * 65536 + self.random_16()
def random_between(self, first, last):
size = last - first + 1
if size > long(4294967296):
raise ValueError('too big')
if size > 65536:
rand = self.random_32
max = long(4294967295)
elif size > 256:
rand = self.random_16
max = 65535
else:
rand = self.random_8
max = 255
return first + size * rand() // (max + 1)
pool = EntropyPool()
try:
system_random = random.SystemRandom()
except Exception:
system_random = None
def random_16():
if system_random is not None:
return system_random.randrange(0, 65536)
else:
return pool.random_16()
def between(first, last):
if system_random is not None:
return system_random.randrange(first, last + 1)
else:
return pool.random_between(first, last)
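# Usage sketch: callers typically draw 16-bit message ids and bounded values,
# falling back to the pool above only when SystemRandom is unavailable:
#
#   >>> random_16()       # 0..65535
#   >>> between(1, 10)    # 1..10 inclusive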

View file

@ -0,0 +1,128 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Common DNS Exceptions.
Dnspython modules may also define their own exceptions, which will
always be subclasses of ``DNSException``.
"""
class DNSException(Exception):
"""Abstract base class shared by all dnspython exceptions.
It supports two basic modes of operation:
a) Old/compatible mode is used if ``__init__`` was called with
empty *kwargs*. In compatible mode all *args* are passed
to the standard Python Exception class as before and all *args* are
printed by the standard ``__str__`` implementation. Class variable
``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()``
if *args* is empty.
b) New/parametrized mode is used if ``__init__`` was called with
non-empty *kwargs*.
In the new mode *args* must be empty and all kwargs must match
those set in class variable ``supp_kwargs``. All kwargs are stored inside
``self.kwargs`` and used in a new ``__str__`` implementation to construct
a formatted message based on the ``fmt`` class variable, a ``string``.
In the simplest case it is enough to override the ``supp_kwargs``
and ``fmt`` class variables to get nice parametrized messages.
"""
msg = None # non-parametrized message
supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check)
fmt = None # message parametrized with results from _fmt_kwargs
def __init__(self, *args, **kwargs):
self._check_params(*args, **kwargs)
if kwargs:
self.kwargs = self._check_kwargs(**kwargs)
self.msg = str(self)
else:
self.kwargs = dict() # defined but empty for old mode exceptions
if self.msg is None:
# doc string is better implicit message than empty string
self.msg = self.__doc__
if args:
super(DNSException, self).__init__(*args)
else:
super(DNSException, self).__init__(self.msg)
def _check_params(self, *args, **kwargs):
"""Old exceptions supported only args and not kwargs.
For sanity we do not allow to mix old and new behavior."""
if args or kwargs:
assert bool(args) != bool(kwargs), \
'keyword arguments are mutually exclusive with positional args'
def _check_kwargs(self, **kwargs):
if kwargs:
assert set(kwargs.keys()) == self.supp_kwargs, \
'following set of keyword args is required: %s' % (
self.supp_kwargs)
return kwargs
def _fmt_kwargs(self, **kwargs):
"""Format kwargs before printing them.
Resulting dictionary has to have keys necessary for str.format call
on fmt class variable.
"""
fmtargs = {}
for kw, data in kwargs.items():
if isinstance(data, (list, set)):
# convert list of <someobj> to list of str(<someobj>)
fmtargs[kw] = list(map(str, data))
if len(fmtargs[kw]) == 1:
# remove list brackets [] from single-item lists
fmtargs[kw] = fmtargs[kw].pop()
else:
fmtargs[kw] = data
return fmtargs
def __str__(self):
if self.kwargs and self.fmt:
# provide custom message constructed from keyword arguments
fmtargs = self._fmt_kwargs(**self.kwargs)
return self.fmt.format(**fmtargs)
else:
# print *args directly in the same way as old DNSException
return super(DNSException, self).__str__()
class FormError(DNSException):
"""DNS message is malformed."""
class SyntaxError(DNSException):
"""Text input is malformed."""
class UnexpectedEnd(SyntaxError):
"""Text input ended unexpectedly."""
class TooBig(DNSException):
"""The DNS message is too big."""
class Timeout(DNSException):
"""The DNS operation timed out."""
supp_kwargs = {'timeout'}
fmt = "The DNS operation timed out after {timeout} seconds"

View file

@ -0,0 +1,130 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Message Flags."""
# Standard DNS flags
#: Query Response
QR = 0x8000
#: Authoritative Answer
AA = 0x0400
#: Truncated Response
TC = 0x0200
#: Recursion Desired
RD = 0x0100
#: Recursion Available
RA = 0x0080
#: Authentic Data
AD = 0x0020
#: Checking Disabled
CD = 0x0010
# EDNS flags
#: DNSSEC answer OK
DO = 0x8000
_by_text = {
'QR': QR,
'AA': AA,
'TC': TC,
'RD': RD,
'RA': RA,
'AD': AD,
'CD': CD
}
_edns_by_text = {
'DO': DO
}
# We construct the inverse mappings programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mappings not to be true inverses.
_by_value = {y: x for x, y in _by_text.items()}
_edns_by_value = {y: x for x, y in _edns_by_text.items()}
def _order_flags(table):
order = list(table.items())
order.sort()
order.reverse()
return order
_flags_order = _order_flags(_by_value)
_edns_flags_order = _order_flags(_edns_by_value)
def _from_text(text, table):
flags = 0
tokens = text.split()
for t in tokens:
flags = flags | table[t.upper()]
return flags
def _to_text(flags, table, order):
text_flags = []
for k, v in order:
if flags & k != 0:
text_flags.append(v)
return ' '.join(text_flags)
def from_text(text):
"""Convert a space-separated list of flag text values into a flags
value.
Returns an ``int``
"""
return _from_text(text, _by_text)
def to_text(flags):
"""Convert a flags value into a space-separated list of flag text
values.
Returns a ``text``.
"""
return _to_text(flags, _by_value, _flags_order)
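# Round-trip sketch (flag names per the tables above):
#
#   >>> from_text('QR RD RA') == 0x8180
#   True
#   >>> to_text(0x8180)
#   'QR RD RA'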
def edns_from_text(text):
"""Convert a space-separated list of EDNS flag text values into a EDNS
flags value.
Returns an ``int``
"""
return _from_text(text, _edns_by_text)
def edns_to_text(flags):
"""Convert an EDNS flags value into a space-separated list of EDNS flag
text values.
Returns a ``text``.
"""
return _to_text(flags, _edns_by_value, _edns_flags_order)

View file

@ -0,0 +1,69 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2012-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS GENERATE range conversion."""
import dns
def from_text(text):
"""Convert the text form of a range in a ``$GENERATE`` statement to an
integer.
*text*, a ``str``, the textual range in ``$GENERATE`` form.
Returns a tuple of three ``int`` values ``(start, stop, step)``.
"""
# TODO, figure out the bounds on start, stop and step.
step = 1
cur = ''
state = 0
# state 0 1 2 3 4
# x - y / z
if text and text[0] == '-':
raise dns.exception.SyntaxError("Start cannot be a negative number")
for c in text:
if c == '-' and state == 0:
start = int(cur)
cur = ''
state = 2
elif c == '/':
stop = int(cur)
cur = ''
state = 4
elif c.isdigit():
cur += c
else:
raise dns.exception.SyntaxError("Could not parse %s" % (c))
if state in (1, 3):
raise dns.exception.SyntaxError()
if state == 2:
stop = int(cur)
if state == 4:
step = int(cur)
assert step >= 1
assert start >= 0
assert start <= stop
# TODO, can start == stop?
return (start, stop, step)
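# Parsing sketch ($GENERATE ranges, illustrative values):
#
#   >>> from_text('1-100/2')
#   (1, 100, 2)
#   >>> from_text('27-30')      # step defaults to 1
#   (27, 30, 1)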

View file

@ -0,0 +1,37 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Hashing backwards compatibility wrapper"""
import hashlib
import warnings
warnings.warn(
"dns.hash module will be removed in future versions. Please use hashlib instead.",
DeprecationWarning)
hashes = {}
hashes['MD5'] = hashlib.md5
hashes['SHA1'] = hashlib.sha1
hashes['SHA224'] = hashlib.sha224
hashes['SHA256'] = hashlib.sha256
hashes['SHA384'] = hashlib.sha384
hashes['SHA512'] = hashlib.sha512
def get(algorithm):
return hashes[algorithm.upper()]

View file

@ -0,0 +1,124 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Generic Internet address helper functions."""
import socket
import dns.ipv4
import dns.ipv6
from ._compat import maybe_ord
# We assume that AF_INET is always defined.
AF_INET = socket.AF_INET
# AF_INET6 might not be defined in the socket module, but we need it.
# We'll try to use the socket module's value, and if it doesn't work,
# we'll use our own value.
try:
AF_INET6 = socket.AF_INET6
except AttributeError:
AF_INET6 = 9999
def inet_pton(family, text):
"""Convert the textual form of a network address into its binary form.
*family* is an ``int``, the address family.
*text* is a ``text``, the textual address.
Raises ``NotImplementedError`` if the address family specified is not
implemented.
Returns a ``binary``.
"""
if family == AF_INET:
return dns.ipv4.inet_aton(text)
elif family == AF_INET6:
return dns.ipv6.inet_aton(text)
else:
raise NotImplementedError
def inet_ntop(family, address):
"""Convert the binary form of a network address into its textual form.
*family* is an ``int``, the address family.
*address* is a ``binary``, the network address in binary form.
Raises ``NotImplementedError`` if the address family specified is not
implemented.
Returns a ``text``.
"""
if family == AF_INET:
return dns.ipv4.inet_ntoa(address)
elif family == AF_INET6:
return dns.ipv6.inet_ntoa(address)
else:
raise NotImplementedError
def af_for_address(text):
"""Determine the address family of a textual-form network address.
*text*, a ``text``, the textual address.
Raises ``ValueError`` if the address family cannot be determined
from the input.
Returns an ``int``.
"""
try:
dns.ipv4.inet_aton(text)
return AF_INET
except Exception:
try:
dns.ipv6.inet_aton(text)
return AF_INET6
except:
raise ValueError
def is_multicast(text):
"""Is the textual-form network address a multicast address?
*text*, a ``text``, the textual address.
Raises ``ValueError`` if the address family cannot be determined
from the input.
Returns a ``bool``.
"""
try:
first = maybe_ord(dns.ipv4.inet_aton(text)[0])
return first >= 224 and first <= 239
except Exception:
try:
first = maybe_ord(dns.ipv6.inet_aton(text)[0])
return first == 255
except Exception:
raise ValueError
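
A small sketch of the generic address helpers above, assuming the module is importable as ``dns.inet`` as in upstream dnspython:

```python
import dns.inet

packed = dns.inet.inet_pton(dns.inet.AF_INET, "192.0.2.1")
print(dns.inet.inet_ntop(dns.inet.AF_INET, packed))                  # 192.0.2.1
print(dns.inet.af_for_address("2001:db8::1") == dns.inet.AF_INET6)   # True
print(dns.inet.is_multicast("224.0.0.251"))                          # True
```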

View file

@ -0,0 +1,63 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""IPv4 helper functions."""
import struct
import dns.exception
from ._compat import binary_type
def inet_ntoa(address):
"""Convert an IPv4 address in binary form to text form.
*address*, a ``binary``, the IPv4 address in binary form.
Returns a ``text``.
"""
if len(address) != 4:
raise dns.exception.SyntaxError
if not isinstance(address, bytearray):
address = bytearray(address)
return ('%u.%u.%u.%u' % (address[0], address[1],
address[2], address[3]))
def inet_aton(text):
"""Convert an IPv4 address in text form to binary form.
*text*, a ``text``, the IPv4 address in textual form.
Returns a ``binary``.
"""
if not isinstance(text, binary_type):
text = text.encode()
parts = text.split(b'.')
if len(parts) != 4:
raise dns.exception.SyntaxError
for part in parts:
if not part.isdigit():
raise dns.exception.SyntaxError
        if len(part) > 1 and part[0:1] == b'0':
# No leading zeros
raise dns.exception.SyntaxError
try:
bytes = [int(part) for part in parts]
return struct.pack('BBBB', *bytes)
except:
raise dns.exception.SyntaxError
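
A round-trip sketch for the IPv4 helpers above (``dns.ipv4``):

```python
import dns.ipv4

packed = dns.ipv4.inet_aton("10.0.0.1")   # 4-byte packed form
print(dns.ipv4.inet_ntoa(packed))         # 10.0.0.1
```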

View file

@ -0,0 +1,181 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""IPv6 helper functions."""
import re
import binascii
import dns.exception
import dns.ipv4
from ._compat import xrange, binary_type, maybe_decode
_leading_zero = re.compile(r'0+([0-9a-f]+)')
def inet_ntoa(address):
"""Convert an IPv6 address in binary form to text form.
*address*, a ``binary``, the IPv6 address in binary form.
Raises ``ValueError`` if the address isn't 16 bytes long.
Returns a ``text``.
"""
if len(address) != 16:
raise ValueError("IPv6 addresses are 16 bytes long")
hex = binascii.hexlify(address)
chunks = []
i = 0
l = len(hex)
while i < l:
chunk = maybe_decode(hex[i : i + 4])
# strip leading zeros. we do this with an re instead of
# with lstrip() because lstrip() didn't support chars until
# python 2.2.2
m = _leading_zero.match(chunk)
if not m is None:
chunk = m.group(1)
chunks.append(chunk)
i += 4
#
# Compress the longest subsequence of 0-value chunks to ::
#
best_start = 0
best_len = 0
start = -1
last_was_zero = False
for i in xrange(8):
if chunks[i] != '0':
if last_was_zero:
end = i
current_len = end - start
if current_len > best_len:
best_start = start
best_len = current_len
last_was_zero = False
elif not last_was_zero:
start = i
last_was_zero = True
if last_was_zero:
end = 8
current_len = end - start
if current_len > best_len:
best_start = start
best_len = current_len
if best_len > 1:
if best_start == 0 and \
(best_len == 6 or
best_len == 5 and chunks[5] == 'ffff'):
# We have an embedded IPv4 address
if best_len == 6:
prefix = '::'
else:
prefix = '::ffff:'
hex = prefix + dns.ipv4.inet_ntoa(address[12:])
else:
hex = ':'.join(chunks[:best_start]) + '::' + \
':'.join(chunks[best_start + best_len:])
else:
hex = ':'.join(chunks)
return hex
_v4_ending = re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$')
_colon_colon_start = re.compile(br'::.*')
_colon_colon_end = re.compile(br'.*::$')
def inet_aton(text):
"""Convert an IPv6 address in text form to binary form.
*text*, a ``text``, the IPv6 address in textual form.
Returns a ``binary``.
"""
#
# Our aim here is not something fast; we just want something that works.
#
if not isinstance(text, binary_type):
text = text.encode()
if text == b'::':
text = b'0::'
#
# Get rid of the icky dot-quad syntax if we have it.
#
m = _v4_ending.match(text)
if not m is None:
b = bytearray(dns.ipv4.inet_aton(m.group(2)))
text = (u"{}:{:02x}{:02x}:{:02x}{:02x}".format(m.group(1).decode(),
b[0], b[1], b[2],
b[3])).encode()
#
# Try to turn '::<whatever>' into ':<whatever>'; if no match try to
# turn '<whatever>::' into '<whatever>:'
#
m = _colon_colon_start.match(text)
if not m is None:
text = text[1:]
else:
m = _colon_colon_end.match(text)
if not m is None:
text = text[:-1]
#
# Now canonicalize into 8 chunks of 4 hex digits each
#
chunks = text.split(b':')
l = len(chunks)
if l > 8:
raise dns.exception.SyntaxError
seen_empty = False
canonical = []
for c in chunks:
if c == b'':
if seen_empty:
raise dns.exception.SyntaxError
seen_empty = True
for i in xrange(0, 8 - l + 1):
canonical.append(b'0000')
else:
lc = len(c)
if lc > 4:
raise dns.exception.SyntaxError
if lc != 4:
c = (b'0' * (4 - lc)) + c
canonical.append(c)
if l < 8 and not seen_empty:
raise dns.exception.SyntaxError
text = b''.join(canonical)
#
# Finally we can go to binary.
#
try:
return binascii.unhexlify(text)
except (binascii.Error, TypeError):
raise dns.exception.SyntaxError
_mapped_prefix = b'\x00' * 10 + b'\xff\xff'
def is_mapped(address):
"""Is the specified address a mapped IPv4 address?
*address*, a ``binary`` is an IPv6 address in binary form.
Returns a ``bool``.
"""
return address.startswith(_mapped_prefix)
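
A sketch for the IPv6 helpers above (``dns.ipv6``), including the mapped-address check:

```python
import dns.ipv6

packed = dns.ipv6.inet_aton("2001:db8::1")
print(dns.ipv6.inet_ntoa(packed))                                    # 2001:db8::1
print(dns.ipv6.is_mapped(dns.ipv6.inet_aton("::ffff:192.0.2.1")))    # True
```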

File diff suppressed because it is too large


View file

@ -0,0 +1,994 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Names.
"""
from io import BytesIO
import struct
import sys
import copy
import encodings.idna
try:
import idna
have_idna_2008 = True
except ImportError:
have_idna_2008 = False
import dns.exception
import dns.wiredata
from ._compat import long, binary_type, text_type, unichr, maybe_decode
try:
maxint = sys.maxint # pylint: disable=sys-max-int
except AttributeError:
maxint = (1 << (8 * struct.calcsize("P"))) // 2 - 1
# fullcompare() result values
#: The compared names have no relationship to each other.
NAMERELN_NONE = 0
#: the first name is a superdomain of the second.
NAMERELN_SUPERDOMAIN = 1
#: The first name is a subdomain of the second.
NAMERELN_SUBDOMAIN = 2
#: The compared names are equal.
NAMERELN_EQUAL = 3
#: The compared names have a common ancestor.
NAMERELN_COMMONANCESTOR = 4
class EmptyLabel(dns.exception.SyntaxError):
"""A DNS label is empty."""
class BadEscape(dns.exception.SyntaxError):
"""An escaped code in a text format of DNS name is invalid."""
class BadPointer(dns.exception.FormError):
"""A DNS compression pointer points forward instead of backward."""
class BadLabelType(dns.exception.FormError):
"""The label type in DNS name wire format is unknown."""
class NeedAbsoluteNameOrOrigin(dns.exception.DNSException):
"""An attempt was made to convert a non-absolute name to
wire when there was also a non-absolute (or missing) origin."""
class NameTooLong(dns.exception.FormError):
"""A DNS name is > 255 octets long."""
class LabelTooLong(dns.exception.SyntaxError):
"""A DNS label is > 63 octets long."""
class AbsoluteConcatenation(dns.exception.DNSException):
"""An attempt was made to append anything other than the
empty name to an absolute DNS name."""
class NoParent(dns.exception.DNSException):
"""An attempt was made to get the parent of the root name
or the empty name."""
class NoIDNA2008(dns.exception.DNSException):
"""IDNA 2008 processing was requested but the idna module is not
available."""
class IDNAException(dns.exception.DNSException):
"""IDNA processing raised an exception."""
supp_kwargs = {'idna_exception'}
fmt = "IDNA processing exception: {idna_exception}"
class IDNACodec(object):
"""Abstract base class for IDNA encoder/decoders."""
def __init__(self):
pass
def encode(self, label):
raise NotImplementedError
def decode(self, label):
        # We do not apply any IDNA policy on decode; we just decode punycode.
downcased = label.lower()
if downcased.startswith(b'xn--'):
try:
label = downcased[4:].decode('punycode')
except Exception as e:
raise IDNAException(idna_exception=e)
else:
label = maybe_decode(label)
return _escapify(label, True)
class IDNA2003Codec(IDNACodec):
"""IDNA 2003 encoder/decoder."""
def __init__(self, strict_decode=False):
"""Initialize the IDNA 2003 encoder/decoder.
*strict_decode* is a ``bool``. If `True`, then IDNA2003 checking
is done when decoding. This can cause failures if the name
was encoded with IDNA2008. The default is `False`.
"""
super(IDNA2003Codec, self).__init__()
self.strict_decode = strict_decode
def encode(self, label):
"""Encode *label*."""
if label == '':
return b''
try:
return encodings.idna.ToASCII(label)
except UnicodeError:
raise LabelTooLong
def decode(self, label):
"""Decode *label*."""
if not self.strict_decode:
return super(IDNA2003Codec, self).decode(label)
if label == b'':
return u''
try:
return _escapify(encodings.idna.ToUnicode(label), True)
except Exception as e:
raise IDNAException(idna_exception=e)
class IDNA2008Codec(IDNACodec):
"""IDNA 2008 encoder/decoder.
*uts_46* is a ``bool``. If True, apply Unicode IDNA
compatibility processing as described in Unicode Technical
Standard #46 (http://unicode.org/reports/tr46/).
If False, do not apply the mapping. The default is False.
*transitional* is a ``bool``: If True, use the
"transitional" mode described in Unicode Technical Standard
#46. The default is False.
*allow_pure_ascii* is a ``bool``. If True, then a label which
consists of only ASCII characters is allowed. This is less
strict than regular IDNA 2008, but is also necessary for mixed
    names, e.g. a name starting with "_sip._tcp." and ending
in an IDN suffix which would otherwise be disallowed. The
default is False.
*strict_decode* is a ``bool``: If True, then IDNA2008 checking
is done when decoding. This can cause failures if the name
was encoded with IDNA2003. The default is False.
"""
def __init__(self, uts_46=False, transitional=False,
allow_pure_ascii=False, strict_decode=False):
"""Initialize the IDNA 2008 encoder/decoder."""
super(IDNA2008Codec, self).__init__()
self.uts_46 = uts_46
self.transitional = transitional
self.allow_pure_ascii = allow_pure_ascii
self.strict_decode = strict_decode
def is_all_ascii(self, label):
for c in label:
if ord(c) > 0x7f:
return False
return True
def encode(self, label):
if label == '':
return b''
if self.allow_pure_ascii and self.is_all_ascii(label):
return label.encode('ascii')
if not have_idna_2008:
raise NoIDNA2008
try:
if self.uts_46:
label = idna.uts46_remap(label, False, self.transitional)
return idna.alabel(label)
except idna.IDNAError as e:
raise IDNAException(idna_exception=e)
def decode(self, label):
if not self.strict_decode:
return super(IDNA2008Codec, self).decode(label)
if label == b'':
return u''
if not have_idna_2008:
raise NoIDNA2008
try:
if self.uts_46:
label = idna.uts46_remap(label, False, False)
return _escapify(idna.ulabel(label), True)
except idna.IDNAError as e:
raise IDNAException(idna_exception=e)
_escaped = bytearray(b'"().;\\@$')
IDNA_2003_Practical = IDNA2003Codec(False)
IDNA_2003_Strict = IDNA2003Codec(True)
IDNA_2003 = IDNA_2003_Practical
IDNA_2008_Practical = IDNA2008Codec(True, False, True, False)
IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False)
IDNA_2008_Strict = IDNA2008Codec(False, False, False, True)
IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False)
IDNA_2008 = IDNA_2008_Practical
def _escapify(label, unicode_mode=False):
"""Escape the characters in label which need it.
@param unicode_mode: escapify only special and whitespace (<= 0x20)
characters
@returns: the escaped string
@rtype: string"""
if not unicode_mode:
text = ''
if isinstance(label, text_type):
label = label.encode()
for c in bytearray(label):
if c in _escaped:
text += '\\' + chr(c)
elif c > 0x20 and c < 0x7F:
text += chr(c)
else:
text += '\\%03d' % c
return text.encode()
text = u''
if isinstance(label, binary_type):
label = label.decode()
for c in label:
if c > u'\x20' and c < u'\x7f':
text += c
else:
if c >= u'\x7f':
text += c
else:
text += u'\\%03d' % ord(c)
return text
def _validate_labels(labels):
"""Check for empty labels in the middle of a label sequence,
labels that are too long, and for too many labels.
Raises ``dns.name.NameTooLong`` if the name as a whole is too long.
Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root
label) and appears in a position other than the end of the label
sequence
"""
l = len(labels)
total = 0
i = -1
j = 0
for label in labels:
ll = len(label)
total += ll + 1
if ll > 63:
raise LabelTooLong
if i < 0 and label == b'':
i = j
j += 1
if total > 255:
raise NameTooLong
if i >= 0 and i != l - 1:
raise EmptyLabel
def _maybe_convert_to_binary(label):
"""If label is ``text``, convert it to ``binary``. If it is already
``binary`` just return it.
"""
if isinstance(label, binary_type):
return label
if isinstance(label, text_type):
return label.encode()
raise ValueError
class Name(object):
"""A DNS name.
The dns.name.Name class represents a DNS name as a tuple of
labels. Each label is a `binary` in DNS wire format. Instances
of the class are immutable.
"""
__slots__ = ['labels']
def __init__(self, labels):
"""*labels* is any iterable whose values are ``text`` or ``binary``.
"""
labels = [_maybe_convert_to_binary(x) for x in labels]
super(Name, self).__setattr__('labels', tuple(labels))
_validate_labels(self.labels)
def __setattr__(self, name, value):
# Names are immutable
raise TypeError("object doesn't support attribute assignment")
def __copy__(self):
return Name(self.labels)
def __deepcopy__(self, memo):
return Name(copy.deepcopy(self.labels, memo))
def __getstate__(self):
# Names can be pickled
return {'labels': self.labels}
def __setstate__(self, state):
super(Name, self).__setattr__('labels', state['labels'])
_validate_labels(self.labels)
def is_absolute(self):
"""Is the most significant label of this name the root label?
Returns a ``bool``.
"""
return len(self.labels) > 0 and self.labels[-1] == b''
def is_wild(self):
"""Is this name wild? (I.e. Is the least significant label '*'?)
Returns a ``bool``.
"""
return len(self.labels) > 0 and self.labels[0] == b'*'
def __hash__(self):
"""Return a case-insensitive hash of the name.
Returns an ``int``.
"""
h = long(0)
for label in self.labels:
for c in bytearray(label.lower()):
h += (h << 3) + c
return int(h % maxint)
def fullcompare(self, other):
"""Compare two names, returning a 3-tuple
``(relation, order, nlabels)``.
        *relation* describes the relationship between the names,
and is one of: ``dns.name.NAMERELN_NONE``,
``dns.name.NAMERELN_SUPERDOMAIN``, ``dns.name.NAMERELN_SUBDOMAIN``,
``dns.name.NAMERELN_EQUAL``, or ``dns.name.NAMERELN_COMMONANCESTOR``.
*order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and ==
0 if *self* == *other*. A relative name is always less than an
absolute name. If both names have the same relativity, then
the DNSSEC order relation is used to order them.
*nlabels* is the number of significant labels that the two names
have in common.
Here are some examples. Names ending in "." are absolute names,
those not ending in "." are relative names.
============= ============= =========== ===== =======
self other relation order nlabels
============= ============= =========== ===== =======
www.example. www.example. equal 0 3
www.example. example. subdomain > 0 2
example. www.example. superdomain < 0 2
example1.com. example2.com. common anc. < 0 2
example1 example2. none < 0 0
example1. example2 none > 0 0
============= ============= =========== ===== =======
"""
sabs = self.is_absolute()
oabs = other.is_absolute()
if sabs != oabs:
if sabs:
return (NAMERELN_NONE, 1, 0)
else:
return (NAMERELN_NONE, -1, 0)
l1 = len(self.labels)
l2 = len(other.labels)
ldiff = l1 - l2
if ldiff < 0:
l = l1
else:
l = l2
order = 0
nlabels = 0
namereln = NAMERELN_NONE
while l > 0:
l -= 1
l1 -= 1
l2 -= 1
label1 = self.labels[l1].lower()
label2 = other.labels[l2].lower()
if label1 < label2:
order = -1
if nlabels > 0:
namereln = NAMERELN_COMMONANCESTOR
return (namereln, order, nlabels)
elif label1 > label2:
order = 1
if nlabels > 0:
namereln = NAMERELN_COMMONANCESTOR
return (namereln, order, nlabels)
nlabels += 1
order = ldiff
if ldiff < 0:
namereln = NAMERELN_SUPERDOMAIN
elif ldiff > 0:
namereln = NAMERELN_SUBDOMAIN
else:
namereln = NAMERELN_EQUAL
return (namereln, order, nlabels)
def is_subdomain(self, other):
"""Is self a subdomain of other?
Note that the notion of subdomain includes equality, e.g.
"dnpython.org" is a subdomain of itself.
Returns a ``bool``.
"""
(nr, o, nl) = self.fullcompare(other)
if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
return True
return False
def is_superdomain(self, other):
"""Is self a superdomain of other?
Note that the notion of superdomain includes equality, e.g.
"dnpython.org" is a superdomain of itself.
Returns a ``bool``.
"""
(nr, o, nl) = self.fullcompare(other)
if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
return True
return False
def canonicalize(self):
"""Return a name which is equal to the current name, but is in
DNSSEC canonical form.
"""
return Name([x.lower() for x in self.labels])
def __eq__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] == 0
else:
return False
def __ne__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] != 0
else:
return True
def __lt__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] < 0
else:
return NotImplemented
def __le__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] <= 0
else:
return NotImplemented
def __ge__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] >= 0
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] > 0
else:
return NotImplemented
def __repr__(self):
return '<DNS name ' + self.__str__() + '>'
def __str__(self):
return self.to_text(False)
def to_text(self, omit_final_dot=False):
"""Convert name to DNS text format.
*omit_final_dot* is a ``bool``. If True, don't emit the final
dot (denoting the root label) for absolute names. The default
is False.
Returns a ``text``.
"""
if len(self.labels) == 0:
return maybe_decode(b'@')
if len(self.labels) == 1 and self.labels[0] == b'':
return maybe_decode(b'.')
if omit_final_dot and self.is_absolute():
l = self.labels[:-1]
else:
l = self.labels
s = b'.'.join(map(_escapify, l))
return maybe_decode(s)
def to_unicode(self, omit_final_dot=False, idna_codec=None):
"""Convert name to Unicode text format.
IDN ACE labels are converted to Unicode.
*omit_final_dot* is a ``bool``. If True, don't emit the final
dot (denoting the root label) for absolute names. The default
is False.
*idna_codec* specifies the IDNA encoder/decoder. If None, the
dns.name.IDNA_2003_Practical encoder/decoder is used.
The IDNA_2003_Practical decoder does
not impose any policy, it just decodes punycode, so if you
don't want checking for compliance, you can use this decoder
for IDNA2008 as well.
Returns a ``text``.
"""
if len(self.labels) == 0:
return u'@'
if len(self.labels) == 1 and self.labels[0] == b'':
return u'.'
if omit_final_dot and self.is_absolute():
l = self.labels[:-1]
else:
l = self.labels
if idna_codec is None:
idna_codec = IDNA_2003_Practical
return u'.'.join([idna_codec.decode(x) for x in l])
def to_digestable(self, origin=None):
"""Convert name to a format suitable for digesting in hashes.
The name is canonicalized and converted to uncompressed wire
format. All names in wire format are absolute. If the name
is a relative name, then an origin must be supplied.
*origin* is a ``dns.name.Name`` or ``None``. If the name is
relative and origin is not ``None``, then origin will be appended
to the name.
Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is
relative and no origin was provided.
Returns a ``binary``.
"""
if not self.is_absolute():
if origin is None or not origin.is_absolute():
raise NeedAbsoluteNameOrOrigin
labels = list(self.labels)
labels.extend(list(origin.labels))
else:
labels = self.labels
dlabels = [struct.pack('!B%ds' % len(x), len(x), x.lower())
for x in labels]
return b''.join(dlabels)
def to_wire(self, file=None, compress=None, origin=None):
"""Convert name to wire format, possibly compressing it.
*file* is the file where the name is emitted (typically a
BytesIO file). If ``None`` (the default), a ``binary``
containing the wire name will be returned.
*compress*, a ``dict``, is the compression table to use. If
``None`` (the default), names will not be compressed.
*origin* is a ``dns.name.Name`` or ``None``. If the name is
relative and origin is not ``None``, then *origin* will be appended
to it.
Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is
relative and no origin was provided.
Returns a ``binary`` or ``None``.
"""
if file is None:
file = BytesIO()
want_return = True
else:
want_return = False
if not self.is_absolute():
if origin is None or not origin.is_absolute():
raise NeedAbsoluteNameOrOrigin
labels = list(self.labels)
labels.extend(list(origin.labels))
else:
labels = self.labels
i = 0
for label in labels:
n = Name(labels[i:])
i += 1
if compress is not None:
pos = compress.get(n)
else:
pos = None
if pos is not None:
value = 0xc000 + pos
s = struct.pack('!H', value)
file.write(s)
break
else:
if compress is not None and len(n) > 1:
pos = file.tell()
if pos <= 0x3fff:
compress[n] = pos
l = len(label)
file.write(struct.pack('!B', l))
if l > 0:
file.write(label)
if want_return:
return file.getvalue()
def __len__(self):
"""The length of the name (in labels).
Returns an ``int``.
"""
return len(self.labels)
def __getitem__(self, index):
return self.labels[index]
def __add__(self, other):
return self.concatenate(other)
def __sub__(self, other):
return self.relativize(other)
def split(self, depth):
"""Split a name into a prefix and suffix names at the specified depth.
*depth* is an ``int`` specifying the number of labels in the suffix
Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the
name.
Returns the tuple ``(prefix, suffix)``.
"""
l = len(self.labels)
if depth == 0:
return (self, dns.name.empty)
elif depth == l:
return (dns.name.empty, self)
elif depth < 0 or depth > l:
raise ValueError(
'depth must be >= 0 and <= the length of the name')
return (Name(self[: -depth]), Name(self[-depth:]))
def concatenate(self, other):
"""Return a new name which is the concatenation of self and other.
Raises ``dns.name.AbsoluteConcatenation`` if the name is
absolute and *other* is not the empty name.
Returns a ``dns.name.Name``.
"""
if self.is_absolute() and len(other) > 0:
raise AbsoluteConcatenation
labels = list(self.labels)
labels.extend(list(other.labels))
return Name(labels)
def relativize(self, origin):
"""If the name is a subdomain of *origin*, return a new name which is
the name relative to origin. Otherwise return the name.
For example, relativizing ``www.dnspython.org.`` to origin
``dnspython.org.`` returns the name ``www``. Relativizing ``example.``
to origin ``dnspython.org.`` returns ``example.``.
Returns a ``dns.name.Name``.
"""
if origin is not None and self.is_subdomain(origin):
return Name(self[: -len(origin)])
else:
return self
def derelativize(self, origin):
"""If the name is a relative name, return a new name which is the
concatenation of the name and origin. Otherwise return the name.
For example, derelativizing ``www`` to origin ``dnspython.org.``
returns the name ``www.dnspython.org.``. Derelativizing ``example.``
to origin ``dnspython.org.`` returns ``example.``.
Returns a ``dns.name.Name``.
"""
if not self.is_absolute():
return self.concatenate(origin)
else:
return self
def choose_relativity(self, origin=None, relativize=True):
"""Return a name with the relativity desired by the caller.
If *origin* is ``None``, then the name is returned.
Otherwise, if *relativize* is ``True`` the name is
relativized, and if *relativize* is ``False`` the name is
derelativized.
Returns a ``dns.name.Name``.
"""
if origin:
if relativize:
return self.relativize(origin)
else:
return self.derelativize(origin)
else:
return self
def parent(self):
"""Return the parent of the name.
For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``.
Raises ``dns.name.NoParent`` if the name is either the root name or the
empty name, and thus has no parent.
Returns a ``dns.name.Name``.
"""
if self == root or self == empty:
raise NoParent
return Name(self.labels[1:])
#: The root name, '.'
root = Name([b''])
#: The empty name.
empty = Name([])
def from_unicode(text, origin=root, idna_codec=None):
"""Convert unicode text into a Name object.
Labels are encoded in IDN ACE form according to rules specified by
the IDNA codec.
*text*, a ``text``, is the text to convert into a name.
*origin*, a ``dns.name.Name``, specifies the origin to
append to non-absolute names. The default is the root name.
*idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder
is used.
Returns a ``dns.name.Name``.
"""
if not isinstance(text, text_type):
raise ValueError("input to from_unicode() must be a unicode string")
if not (origin is None or isinstance(origin, Name)):
raise ValueError("origin must be a Name or None")
labels = []
label = u''
escaping = False
edigits = 0
total = 0
if idna_codec is None:
idna_codec = IDNA_2003
if text == u'@':
text = u''
if text:
if text == u'.':
return Name([b'']) # no Unicode "u" on this constant!
for c in text:
if escaping:
if edigits == 0:
if c.isdigit():
total = int(c)
edigits += 1
else:
label += c
escaping = False
else:
if not c.isdigit():
raise BadEscape
total *= 10
total += int(c)
edigits += 1
if edigits == 3:
escaping = False
label += unichr(total)
elif c in [u'.', u'\u3002', u'\uff0e', u'\uff61']:
if len(label) == 0:
raise EmptyLabel
labels.append(idna_codec.encode(label))
label = u''
elif c == u'\\':
escaping = True
edigits = 0
total = 0
else:
label += c
if escaping:
raise BadEscape
if len(label) > 0:
labels.append(idna_codec.encode(label))
else:
labels.append(b'')
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
labels.extend(list(origin.labels))
return Name(labels)
def from_text(text, origin=root, idna_codec=None):
"""Convert text into a Name object.
*text*, a ``text``, is the text to convert into a name.
*origin*, a ``dns.name.Name``, specifies the origin to
append to non-absolute names. The default is the root name.
*idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder
is used.
Returns a ``dns.name.Name``.
"""
if isinstance(text, text_type):
return from_unicode(text, origin, idna_codec)
if not isinstance(text, binary_type):
raise ValueError("input to from_text() must be a string")
if not (origin is None or isinstance(origin, Name)):
raise ValueError("origin must be a Name or None")
labels = []
label = b''
escaping = False
edigits = 0
total = 0
if text == b'@':
text = b''
if text:
if text == b'.':
return Name([b''])
for c in bytearray(text):
byte_ = struct.pack('!B', c)
if escaping:
if edigits == 0:
if byte_.isdigit():
total = int(byte_)
edigits += 1
else:
label += byte_
escaping = False
else:
if not byte_.isdigit():
raise BadEscape
total *= 10
total += int(byte_)
edigits += 1
if edigits == 3:
escaping = False
label += struct.pack('!B', total)
elif byte_ == b'.':
if len(label) == 0:
raise EmptyLabel
labels.append(label)
label = b''
elif byte_ == b'\\':
escaping = True
edigits = 0
total = 0
else:
label += byte_
if escaping:
raise BadEscape
if len(label) > 0:
labels.append(label)
else:
labels.append(b'')
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
labels.extend(list(origin.labels))
return Name(labels)
def from_wire(message, current):
"""Convert possibly compressed wire format into a Name.
*message* is a ``binary`` containing an entire DNS message in DNS
wire form.
*current*, an ``int``, is the offset of the beginning of the name
from the start of the message
Raises ``dns.name.BadPointer`` if a compression pointer did not
point backwards in the message.
Raises ``dns.name.BadLabelType`` if an invalid label type was encountered.
Returns a ``(dns.name.Name, int)`` tuple consisting of the name
that was read and the number of bytes of the wire format message
which were consumed reading it.
"""
if not isinstance(message, binary_type):
raise ValueError("input to from_wire() must be a byte string")
message = dns.wiredata.maybe_wrap(message)
labels = []
biggest_pointer = current
hops = 0
count = message[current]
current += 1
cused = 1
while count != 0:
if count < 64:
labels.append(message[current: current + count].unwrap())
current += count
if hops == 0:
cused += count
elif count >= 192:
current = (count & 0x3f) * 256 + message[current]
if hops == 0:
cused += 1
if current >= biggest_pointer:
raise BadPointer
biggest_pointer = current
hops += 1
else:
raise BadLabelType
count = message[current]
current += 1
if hops == 0:
cused += 1
labels.append('')
return (Name(labels), cused)
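
A brief tour of the ``dns.name`` API defined above: parsing a name, comparing it against an origin, and relativizing it.

```python
import dns.name

name = dns.name.from_text("www.dnspython.org")   # origin defaults to the root
origin = dns.name.from_text("dnspython.org")

print(name.is_subdomain(origin))                 # True
print(name.relativize(origin))                   # www
print(name.to_text(omit_final_dot=True))         # www.dnspython.org
```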

View file

@ -0,0 +1,108 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
# Copyright (C) 2016 Coresec Systems AB
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC
# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS name dictionary"""
import collections
import dns.name
from ._compat import xrange
class NameDict(collections.MutableMapping):
"""A dictionary whose keys are dns.name.Name objects.
In addition to being like a regular Python dictionary, this
dictionary can also get the deepest match for a given key.
"""
__slots__ = ["max_depth", "max_depth_items", "__store"]
def __init__(self, *args, **kwargs):
super(NameDict, self).__init__()
self.__store = dict()
#: the maximum depth of the keys that have ever been added
self.max_depth = 0
#: the number of items of maximum depth
self.max_depth_items = 0
self.update(dict(*args, **kwargs))
def __update_max_depth(self, key):
if len(key) == self.max_depth:
self.max_depth_items = self.max_depth_items + 1
elif len(key) > self.max_depth:
self.max_depth = len(key)
self.max_depth_items = 1
def __getitem__(self, key):
return self.__store[key]
def __setitem__(self, key, value):
if not isinstance(key, dns.name.Name):
raise ValueError('NameDict key must be a name')
self.__store[key] = value
self.__update_max_depth(key)
def __delitem__(self, key):
value = self.__store.pop(key)
if len(value) == self.max_depth:
self.max_depth_items = self.max_depth_items - 1
if self.max_depth_items == 0:
self.max_depth = 0
for k in self.__store:
self.__update_max_depth(k)
def __iter__(self):
return iter(self.__store)
def __len__(self):
return len(self.__store)
def has_key(self, key):
return key in self.__store
def get_deepest_match(self, name):
"""Find the deepest match to *fname* in the dictionary.
The deepest match is the longest name in the dictionary which is
a superdomain of *name*. Note that *superdomain* includes matching
*name* itself.
*name*, a ``dns.name.Name``, the name to find.
Returns a ``(key, value)`` where *key* is the deepest
``dns.name.Name``, and *value* is the value associated with *key*.
"""
depth = len(name)
if depth > self.max_depth:
depth = self.max_depth
for i in xrange(-depth, 0):
n = dns.name.Name(name[i:])
if n in self:
return (n, self[n])
v = self[dns.name.empty]
return (dns.name.empty, v)
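
A sketch of ``NameDict.get_deepest_match()`` above: keys must be ``dns.name.Name`` objects and the lookup returns the longest stored superdomain of the queried name (assuming the vendored module imports cleanly on the interpreter in use):

```python
import dns.name
import dns.namedict

nd = dns.namedict.NameDict()
nd[dns.name.from_text("org.")] = "tld"
nd[dns.name.from_text("dnspython.org.")] = "zone"

key, value = nd.get_deepest_match(dns.name.from_text("www.dnspython.org."))
print(key, value)   # dnspython.org. zone
```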

View file

@ -0,0 +1,182 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS nodes. A node is a set of rdatasets."""
from io import StringIO
import dns.rdataset
import dns.rdatatype
import dns.renderer
class Node(object):
"""A Node is a set of rdatasets."""
__slots__ = ['rdatasets']
def __init__(self):
        #: the set of rdatasets, represented as a list.
self.rdatasets = []
def to_text(self, name, **kw):
"""Convert a node to text format.
Each rdataset at the node is printed. Any keyword arguments
to this method are passed on to the rdataset's to_text() method.
*name*, a ``dns.name.Name`` or ``text``, the owner name of the rdatasets.
Returns a ``text``.
"""
s = StringIO()
for rds in self.rdatasets:
if len(rds) > 0:
s.write(rds.to_text(name, **kw))
s.write(u'\n')
return s.getvalue()[:-1]
def __repr__(self):
return '<DNS node ' + str(id(self)) + '>'
def __eq__(self, other):
#
# This is inefficient. Good thing we don't need to do it much.
#
for rd in self.rdatasets:
if rd not in other.rdatasets:
return False
for rd in other.rdatasets:
if rd not in self.rdatasets:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __len__(self):
return len(self.rdatasets)
def __iter__(self):
return iter(self.rdatasets)
def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
create=False):
"""Find an rdataset matching the specified properties in the
current node.
*rdclass*, an ``int``, the class of the rdataset.
*rdtype*, an ``int``, the type of the rdataset.
*covers*, an ``int``, the covered type. Usually this value is
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
dns.rdatatype.RRSIG, then the covers value will be the rdata
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
types as if they were a family of
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
easier to work with than if RRSIGs covering different rdata
types were aggregated into a single RRSIG rdataset.
*create*, a ``bool``. If True, create the rdataset if it is not found.
Raises ``KeyError`` if an rdataset of the desired type and class does
not exist and *create* is not ``True``.
Returns a ``dns.rdataset.Rdataset``.
"""
for rds in self.rdatasets:
if rds.match(rdclass, rdtype, covers):
return rds
if not create:
raise KeyError
rds = dns.rdataset.Rdataset(rdclass, rdtype)
self.rdatasets.append(rds)
return rds
def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
create=False):
"""Get an rdataset matching the specified properties in the
current node.
None is returned if an rdataset of the specified type and
class does not exist and *create* is not ``True``.
*rdclass*, an ``int``, the class of the rdataset.
*rdtype*, an ``int``, the type of the rdataset.
*covers*, an ``int``, the covered type. Usually this value is
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
dns.rdatatype.RRSIG, then the covers value will be the rdata
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
types as if they were a family of
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
easier to work with than if RRSIGs covering different rdata
types were aggregated into a single RRSIG rdataset.
*create*, a ``bool``. If True, create the rdataset if it is not found.
Returns a ``dns.rdataset.Rdataset`` or ``None``.
"""
try:
rds = self.find_rdataset(rdclass, rdtype, covers, create)
except KeyError:
rds = None
return rds
def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
"""Delete the rdataset matching the specified properties in the
current node.
If a matching rdataset does not exist, it is not an error.
*rdclass*, an ``int``, the class of the rdataset.
*rdtype*, an ``int``, the type of the rdataset.
*covers*, an ``int``, the covered type.
"""
rds = self.get_rdataset(rdclass, rdtype, covers)
if rds is not None:
self.rdatasets.remove(rds)
def replace_rdataset(self, replacement):
"""Replace an rdataset.
It is not an error if there is no rdataset matching *replacement*.
Ownership of the *replacement* object is transferred to the node;
in other words, this method does not store a copy of *replacement*
at the node, it stores *replacement* itself.
*replacement*, a ``dns.rdataset.Rdataset``.
Raises ``ValueError`` if *replacement* is not a
``dns.rdataset.Rdataset``.
"""
if not isinstance(replacement, dns.rdataset.Rdataset):
raise ValueError('replacement is not an rdataset')
self.delete_rdataset(replacement.rdclass, replacement.rdtype,
replacement.covers)
self.rdatasets.append(replacement)
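
A sketch of ``dns.node.Node`` above, using ``find_rdataset(create=True)`` to attach an rdataset and ``to_text()`` to print it under an owner name; ``dns.rdata`` and ``dns.rdataset`` come from the other vendored dnspython modules:

```python
import dns.name
import dns.node
import dns.rdata
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()
rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.A, create=True)
rds.add(dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, "192.0.2.1"),
        ttl=300)
print(node.to_text(dns.name.from_text("www.example.")))
# www.example. 300 IN A 192.0.2.1
```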

View file

@ -0,0 +1,119 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Opcodes."""
import dns.exception
#: Query
QUERY = 0
#: Inverse Query (historical)
IQUERY = 1
#: Server Status (unspecified and unimplemented anywhere)
STATUS = 2
#: Notify
NOTIFY = 4
#: Dynamic Update
UPDATE = 5
_by_text = {
'QUERY': QUERY,
'IQUERY': IQUERY,
'STATUS': STATUS,
'NOTIFY': NOTIFY,
'UPDATE': UPDATE
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be a true inverse.
_by_value = {y: x for x, y in _by_text.items()}
class UnknownOpcode(dns.exception.DNSException):
"""An DNS opcode is unknown."""
def from_text(text):
"""Convert text into an opcode.
*text*, a ``text``, the textual opcode
Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
Returns an ``int``.
"""
if text.isdigit():
value = int(text)
if value >= 0 and value <= 15:
return value
value = _by_text.get(text.upper())
if value is None:
raise UnknownOpcode
return value
def from_flags(flags):
"""Extract an opcode from DNS message flags.
*flags*, an ``int``, the DNS flags.
Returns an ``int``.
"""
return (flags & 0x7800) >> 11
def to_flags(value):
"""Convert an opcode to a value suitable for ORing into DNS message
flags.
*value*, an ``int``, the DNS opcode value.
Returns an ``int``.
"""
return (value << 11) & 0x7800
def to_text(value):
"""Convert an opcode to text.
    *value*, an ``int``, the opcode value.
Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
Returns a ``text``.
"""
text = _by_value.get(value)
if text is None:
text = str(value)
return text
def is_update(flags):
"""Is the opcode in flags UPDATE?
*flags*, an ``int``, the DNS message flags.
Returns a ``bool``.
"""
return from_flags(flags) == UPDATE
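
A sketch of the opcode helpers above: converting between text, numeric values, and DNS header flag bits:

```python
import dns.opcode

value = dns.opcode.from_text("UPDATE")          # 5
flags = dns.opcode.to_flags(value)              # shifted into bits 11-14
print(dns.opcode.from_flags(flags) == value)    # True
print(dns.opcode.to_text(value))                # UPDATE
print(dns.opcode.is_update(flags))              # True
```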

View file

@ -0,0 +1,683 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Talk to a DNS server."""
from __future__ import generators
import errno
import select
import socket
import struct
import sys
import time
import dns.exception
import dns.inet
import dns.name
import dns.message
import dns.rcode
import dns.rdataclass
import dns.rdatatype
from ._compat import long, string_types, PY3
if PY3:
select_error = OSError
else:
select_error = select.error
# Function used to create a socket. Can be overridden if needed in special
# situations.
socket_factory = socket.socket
class UnexpectedSource(dns.exception.DNSException):
"""A DNS query response came from an unexpected address or port."""
class BadResponse(dns.exception.FormError):
"""A DNS query response does not respond to the question asked."""
class TransferError(dns.exception.DNSException):
"""A zone transfer response got a non-zero rcode."""
def __init__(self, rcode):
message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode)
super(TransferError, self).__init__(message)
self.rcode = rcode
def _compute_expiration(timeout):
if timeout is None:
return None
else:
return time.time() + timeout
# This module can use either poll() or select() as the "polling backend".
#
# A backend function takes an fd, bools for readability, writability, and
# error detection, and a timeout.
def _poll_for(fd, readable, writable, error, timeout):
"""Poll polling backend."""
event_mask = 0
if readable:
event_mask |= select.POLLIN
if writable:
event_mask |= select.POLLOUT
if error:
event_mask |= select.POLLERR
pollable = select.poll()
pollable.register(fd, event_mask)
if timeout:
event_list = pollable.poll(long(timeout * 1000))
else:
event_list = pollable.poll()
return bool(event_list)
def _select_for(fd, readable, writable, error, timeout):
"""Select polling backend."""
rset, wset, xset = [], [], []
if readable:
rset = [fd]
if writable:
wset = [fd]
if error:
xset = [fd]
if timeout is None:
(rcount, wcount, xcount) = select.select(rset, wset, xset)
else:
(rcount, wcount, xcount) = select.select(rset, wset, xset, timeout)
return bool((rcount or wcount or xcount))
def _wait_for(fd, readable, writable, error, expiration):
# Use the selected polling backend to wait for any of the specified
# events. An "expiration" absolute time is converted into a relative
# timeout.
done = False
while not done:
if expiration is None:
timeout = None
else:
timeout = expiration - time.time()
if timeout <= 0.0:
raise dns.exception.Timeout
try:
if not _polling_backend(fd, readable, writable, error, timeout):
raise dns.exception.Timeout
except select_error as e:
if e.args[0] != errno.EINTR:
raise e
done = True
def _set_polling_backend(fn):
# Internal API. Do not use.
global _polling_backend
_polling_backend = fn
if hasattr(select, 'poll'):
# Prefer poll() on platforms that support it because it has no
# limits on the maximum value of a file descriptor (plus it will
# be more efficient for high values).
_polling_backend = _poll_for
else:
_polling_backend = _select_for
def _wait_for_readable(s, expiration):
_wait_for(s, True, False, True, expiration)
def _wait_for_writable(s, expiration):
_wait_for(s, False, True, True, expiration)
def _addresses_equal(af, a1, a2):
# Convert the first value of the tuple, which is a textual format
# address into binary form, so that we are not confused by different
# textual representations of the same address
try:
n1 = dns.inet.inet_pton(af, a1[0])
n2 = dns.inet.inet_pton(af, a2[0])
except dns.exception.SyntaxError:
return False
return n1 == n2 and a1[1:] == a2[1:]
def _destination_and_source(af, where, port, source, source_port):
# Apply defaults and compute destination and source tuples
# suitable for use in connect(), sendto(), or bind().
if af is None:
try:
af = dns.inet.af_for_address(where)
except Exception:
af = dns.inet.AF_INET
if af == dns.inet.AF_INET:
destination = (where, port)
if source is not None or source_port != 0:
if source is None:
source = '0.0.0.0'
source = (source, source_port)
elif af == dns.inet.AF_INET6:
destination = (where, port, 0, 0)
if source is not None or source_port != 0:
if source is None:
source = '::'
source = (source, source_port, 0, 0)
return (af, destination, source)
def send_udp(sock, what, destination, expiration=None):
"""Send a DNS message to the specified UDP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where to send the query.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
_wait_for_writable(sock, expiration)
sent_time = time.time()
n = sock.sendto(what, destination)
return (n, sent_time)
def receive_udp(sock, destination, expiration=None,
ignore_unexpected=False, one_rr_per_rrset=False,
keyring=None, request_mac=b'', ignore_trailing=False):
"""Read a DNS message from a UDP socket.
*sock*, a ``socket``.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where the associated query was sent.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
*ignore_trailing*, a ``bool``. If ``True``, ignore trailing
junk at end of the received message.
    Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
wire = b''
while 1:
_wait_for_readable(sock, expiration)
(wire, from_address) = sock.recvfrom(65535)
if _addresses_equal(sock.family, from_address, destination) or \
(dns.inet.is_multicast(destination[0]) and
from_address[1:] == destination[1:]):
break
if not ignore_unexpected:
raise UnexpectedSource('got a response from '
'%s instead of %s' % (from_address,
destination))
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset,
ignore_trailing=ignore_trailing)
return (r, received_time)
def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
ignore_unexpected=False, one_rr_per_rrset=False, ignore_trailing=False):
"""Return the response obtained after sending a query via UDP.
*q*, a ``dns.message.Message``, the query to send
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
    *port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
    which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*ignore_trailing*, a ``bool``. If ``True``, ignore trailing
junk at end of the received message.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_DGRAM, 0)
received_time = None
sent_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
if source is not None:
s.bind(source)
(_, sent_time) = send_udp(s, wire, destination, expiration)
(r, received_time) = receive_udp(s, destination, expiration,
ignore_unexpected, one_rr_per_rrset,
q.keyring, q.mac, ignore_trailing)
finally:
if sent_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - sent_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
def _net_read(sock, count, expiration):
"""Read the specified number of bytes from sock. Keep trying until we
either get the desired amount, or we hit EOF.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
s = b''
while count > 0:
_wait_for_readable(sock, expiration)
n = sock.recv(count)
if n == b'':
raise EOFError
count = count - len(n)
s = s + n
return s
def _net_write(sock, data, expiration):
"""Write the specified data to the socket.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
current = 0
l = len(data)
while current < l:
_wait_for_writable(sock, expiration)
current += sock.send(data[current:])
def send_tcp(sock, what, expiration=None):
"""Send a DNS message to the specified TCP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
l = len(what)
# copying the wire into tcpmsg is inefficient, but lets us
# avoid writev() or doing a short write that would get pushed
# onto the net
tcpmsg = struct.pack("!H", l) + what
_wait_for_writable(sock, expiration)
sent_time = time.time()
_net_write(sock, tcpmsg, expiration)
return (len(tcpmsg), sent_time)
def receive_tcp(sock, expiration=None, one_rr_per_rrset=False,
keyring=None, request_mac=b'', ignore_trailing=False):
"""Read a DNS message from a TCP socket.
*sock*, a ``socket``.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
*ignore_trailing*, a ``bool``. If ``True``, ignore trailing
junk at end of the received message.
    Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
ldata = _net_read(sock, 2, expiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(sock, l, expiration)
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset,
ignore_trailing=ignore_trailing)
return (r, received_time)
def _connect(s, address):
try:
s.connect(address)
except socket.error:
(ty, v) = sys.exc_info()[:2]
if hasattr(v, 'errno'):
v_err = v.errno
else:
v_err = v[0]
if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
raise v
def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
one_rr_per_rrset=False, ignore_trailing=False):
"""Return the response obtained after sending a query via TCP.
*q*, a ``dns.message.Message``, the query to send
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
    *port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
    which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*ignore_trailing*, a ``bool``. If ``True``, ignore trailing
junk at end of the received message.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_STREAM, 0)
begin_time = None
received_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
begin_time = time.time()
if source is not None:
s.bind(source)
_connect(s, destination)
send_tcp(s, wire, expiration)
(r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset,
q.keyring, q.mac, ignore_trailing)
finally:
if begin_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - begin_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
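A minimal usage sketch for the ``dns.query.tcp()`` helper above, assuming this vendored ``dns`` package is importable; the resolver address ``192.0.2.53`` and the query name are placeholders:

import dns.message
import dns.query

q = dns.message.make_query('example.com', 'A')   # build the query message
r = dns.query.tcp(q, '192.0.2.53', timeout=5)    # placeholder resolver address
for rrset in r.answer:
    print(rrset)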
def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
timeout=None, port=53, keyring=None, keyname=None, relativize=True,
af=None, lifetime=None, source=None, source_port=0, serial=0,
use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
"""Return a generator for the responses to a zone transfer.
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*zone*, a ``dns.name.Name`` or ``text``, the name of the zone to transfer.
*rdtype*, an ``int`` or ``text``, the type of zone transfer. The
default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
used to do an incremental transfer instead.
*rdclass*, an ``int`` or ``text``, the class of the zone transfer.
The default is ``dns.rdataclass.IN``.
*timeout*, a ``float``, the number of seconds to wait for each
response message. If None, the default, wait forever.
*port*, an ``int``, the port to send the message to. The default is 53.
*keyring*, a ``dict``, the keyring to use for TSIG.
*keyname*, a ``dns.name.Name`` or ``text``, the name of the TSIG
key to use.
*relativize*, a ``bool``. If ``True``, all names in the zone will be
relativized to the zone origin. It is essential that the
relativize setting matches the one specified to
``dns.zone.from_xfr()`` if using this generator to make a zone.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to use to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*lifetime*, a ``float``, the total number of seconds to spend
doing the transfer. If ``None``, the default, then there is no
limit on the time the transfer may take.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*serial*, an ``int``, the SOA serial number to use as the base for
an IXFR diff sequence (only meaningful if *rdtype* is
``dns.rdatatype.IXFR``).
*use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR).
*keyalgorithm*, a ``dns.name.Name`` or ``text``, the TSIG algorithm to use.
Raises on errors, and so does the generator.
Returns a generator of ``dns.message.Message`` objects.
"""
if isinstance(zone, string_types):
zone = dns.name.from_text(zone)
if isinstance(rdtype, string_types):
rdtype = dns.rdatatype.from_text(rdtype)
q = dns.message.make_query(zone, rdtype, rdclass)
if rdtype == dns.rdatatype.IXFR:
rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA',
'. . %u 0 0 0 0' % serial)
q.authority.append(rrset)
if keyring is not None:
q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
if use_udp:
if rdtype != dns.rdatatype.IXFR:
raise ValueError('cannot do a UDP AXFR')
s = socket_factory(af, socket.SOCK_DGRAM, 0)
else:
s = socket_factory(af, socket.SOCK_STREAM, 0)
s.setblocking(0)
if source is not None:
s.bind(source)
expiration = _compute_expiration(lifetime)
_connect(s, destination)
l = len(wire)
if use_udp:
_wait_for_writable(s, expiration)
s.send(wire)
else:
tcpmsg = struct.pack("!H", l) + wire
_net_write(s, tcpmsg, expiration)
done = False
delete_mode = True
expecting_SOA = False
soa_rrset = None
if relativize:
origin = zone
oname = dns.name.empty
else:
origin = None
oname = zone
tsig_ctx = None
first = True
while not done:
mexpiration = _compute_expiration(timeout)
if mexpiration is None or mexpiration > expiration:
mexpiration = expiration
if use_udp:
_wait_for_readable(s, expiration)
(wire, from_address) = s.recvfrom(65535)
else:
ldata = _net_read(s, 2, mexpiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(s, l, mexpiration)
is_ixfr = (rdtype == dns.rdatatype.IXFR)
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
xfr=True, origin=origin, tsig_ctx=tsig_ctx,
multi=True, first=first,
one_rr_per_rrset=is_ixfr)
rcode = r.rcode()
if rcode != dns.rcode.NOERROR:
raise TransferError(rcode)
tsig_ctx = r.tsig_ctx
first = False
answer_index = 0
if soa_rrset is None:
if not r.answer or r.answer[0].name != oname:
raise dns.exception.FormError(
"No answer or RRset not for qname")
rrset = r.answer[0]
if rrset.rdtype != dns.rdatatype.SOA:
raise dns.exception.FormError("first RRset is not an SOA")
answer_index = 1
soa_rrset = rrset.copy()
if rdtype == dns.rdatatype.IXFR:
if soa_rrset[0].serial <= serial:
#
# We're already up-to-date.
#
done = True
else:
expecting_SOA = True
#
# Process SOAs in the answer section (other than the initial
# SOA in the first message).
#
for rrset in r.answer[answer_index:]:
if done:
raise dns.exception.FormError("answers after final SOA")
if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname:
if expecting_SOA:
if rrset[0].serial != serial:
raise dns.exception.FormError(
"IXFR base serial mismatch")
expecting_SOA = False
elif rdtype == dns.rdatatype.IXFR:
delete_mode = not delete_mode
#
# If this SOA RRset is equal to the first we saw then we're
# finished. If this is an IXFR we also check that we're seeing
# the record in the expected part of the response.
#
if rrset == soa_rrset and \
(rdtype == dns.rdatatype.AXFR or
(rdtype == dns.rdatatype.IXFR and delete_mode)):
done = True
elif expecting_SOA:
#
# We made an IXFR request and are expecting another
# SOA RR, but saw something else, so this must be an
# AXFR response.
#
rdtype = dns.rdatatype.AXFR
expecting_SOA = False
if done and q.keyring and not r.had_tsig:
raise dns.exception.FormError("missing TSIG")
yield r
s.close()
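A hedged sketch of driving the ``xfr()`` generator above together with ``dns.zone.from_xfr()``; the server address and zone name are placeholders, and the server must permit zone transfers:

import dns.query
import dns.zone

# The generator yields dns.message.Message objects; dns.zone.from_xfr() consumes them.
zone = dns.zone.from_xfr(dns.query.xfr('192.0.2.53', 'example.com'))
for name in sorted(zone.nodes.keys()):
    print(zone[name].to_text(name))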

View file

@@ -0,0 +1,144 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Result Codes."""
import dns.exception
from ._compat import long
#: No error
NOERROR = 0
#: Form error
FORMERR = 1
#: Server failure
SERVFAIL = 2
#: Name does not exist ("Name Error" in RFC 1035 terminology).
NXDOMAIN = 3
#: Not implemented
NOTIMP = 4
#: Refused
REFUSED = 5
#: Name exists.
YXDOMAIN = 6
#: RRset exists.
YXRRSET = 7
#: RRset does not exist.
NXRRSET = 8
#: Not authoritative.
NOTAUTH = 9
#: Name not in zone.
NOTZONE = 10
#: Bad EDNS version.
BADVERS = 16
_by_text = {
'NOERROR': NOERROR,
'FORMERR': FORMERR,
'SERVFAIL': SERVFAIL,
'NXDOMAIN': NXDOMAIN,
'NOTIMP': NOTIMP,
'REFUSED': REFUSED,
'YXDOMAIN': YXDOMAIN,
'YXRRSET': YXRRSET,
'NXRRSET': NXRRSET,
'NOTAUTH': NOTAUTH,
'NOTZONE': NOTZONE,
'BADVERS': BADVERS
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be a true inverse.
_by_value = {y: x for x, y in _by_text.items()}
class UnknownRcode(dns.exception.DNSException):
"""A DNS rcode is unknown."""
def from_text(text):
"""Convert text into an rcode.
*text*, a ``text``, the textual rcode or an integer in textual form.
Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown.
Returns an ``int``.
"""
if text.isdigit():
v = int(text)
if v >= 0 and v <= 4095:
return v
v = _by_text.get(text.upper())
if v is None:
raise UnknownRcode
return v
def from_flags(flags, ednsflags):
"""Return the rcode value encoded by flags and ednsflags.
*flags*, an ``int``, the DNS flags field.
*ednsflags*, an ``int``, the EDNS flags field.
Raises ``ValueError`` if rcode is < 0 or > 4095.
Returns an ``int``.
"""
value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0)
if value < 0 or value > 4095:
raise ValueError('rcode must be >= 0 and <= 4095')
return value
def to_flags(value):
"""Return a (flags, ednsflags) tuple which encodes the rcode.
*value*, an ``int``, the rcode.
Raises ``ValueError`` if rcode is < 0 or > 4095.
Returns an ``(int, int)`` tuple.
"""
if value < 0 or value > 4095:
raise ValueError('rcode must be >= 0 and <= 4095')
v = value & 0xf
ev = long(value & 0xff0) << 20
return (v, ev)
def to_text(value):
"""Convert rcode into text.
*value*, an ``int``, the rcode.
Raises ``ValueError`` if rcode is < 0 or > 4095.
Returns a ``text``.
"""
if value < 0 or value > 4095:
raise ValueError('rcode must be >= 0 and <= 4095')
text = _by_value.get(value)
if text is None:
text = str(value)
return text
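A small sketch of the conversion helpers in this rcode module, assuming the vendored ``dns`` package is importable:

import dns.rcode

assert dns.rcode.from_text('NXDOMAIN') == dns.rcode.NXDOMAIN
print(dns.rcode.to_text(2))                                  # 'SERVFAIL'
flags, ednsflags = dns.rcode.to_flags(dns.rcode.BADVERS)     # BADVERS needs the EDNS bits
assert dns.rcode.from_flags(flags, ednsflags) == dns.rcode.BADVERS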

View file

@@ -0,0 +1,456 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdata."""
from io import BytesIO
import base64
import binascii
import dns.exception
import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.tokenizer
import dns.wiredata
from ._compat import xrange, string_types, text_type
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
_hex_chunksize = 32
def _hexify(data, chunksize=_hex_chunksize):
"""Convert a binary string into its hex encoding, broken up into chunks
of chunksize characters separated by a space.
"""
line = binascii.hexlify(data)
return b' '.join([line[i:i + chunksize]
for i
in range(0, len(line), chunksize)]).decode()
_base64_chunksize = 32
def _base64ify(data, chunksize=_base64_chunksize):
"""Convert a binary string into its base64 encoding, broken up into chunks
of chunksize characters separated by a space.
"""
line = base64.b64encode(data)
return b' '.join([line[i:i + chunksize]
for i
in range(0, len(line), chunksize)]).decode()
__escaped = bytearray(b'"\\')
def _escapify(qstring):
"""Escape the characters in a quoted string which need it."""
if isinstance(qstring, text_type):
qstring = qstring.encode()
if not isinstance(qstring, bytearray):
qstring = bytearray(qstring)
text = ''
for c in qstring:
if c in __escaped:
text += '\\' + chr(c)
elif c >= 0x20 and c < 0x7F:
text += chr(c)
else:
text += '\\%03d' % c
return text
def _truncate_bitmap(what):
"""Determine the index of greatest byte that isn't all zeros, and
return the bitmap that contains all the bytes less than that index.
"""
for i in xrange(len(what) - 1, -1, -1):
if what[i] != 0:
return what[0: i + 1]
return what[0:1]
class Rdata(object):
"""Base class for all DNS rdata types."""
__slots__ = ['rdclass', 'rdtype']
def __init__(self, rdclass, rdtype):
"""Initialize an rdata.
*rdclass*, an ``int``, is the rdataclass of the Rdata.
*rdtype*, an ``int``, is the rdatatype of the Rdata.
"""
self.rdclass = rdclass
self.rdtype = rdtype
def covers(self):
"""Return the type a Rdata covers.
DNS SIG/RRSIG rdatas apply to a specific type; this type is
returned by the covers() function. If the rdata type is not
SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
creating rdatasets, allowing the rdataset to contain only RRSIGs
of a particular type, e.g. RRSIG(NS).
Returns an ``int``.
"""
return dns.rdatatype.NONE
def extended_rdatatype(self):
"""Return a 32-bit type value, the least significant 16 bits of
which are the ordinary DNS type, and the upper 16 bits of which are
the "covered" type, if any.
Returns an ``int``.
"""
return self.covers() << 16 | self.rdtype
def to_text(self, origin=None, relativize=True, **kw):
"""Convert an rdata to text format.
Returns a ``text``.
"""
raise NotImplementedError
def to_wire(self, file, compress=None, origin=None):
"""Convert an rdata to wire format.
Returns a ``binary``.
"""
raise NotImplementedError
def to_digestable(self, origin=None):
"""Convert rdata to a format suitable for digesting in hashes. This
is also the DNSSEC canonical form.
Returns a ``binary``.
"""
f = BytesIO()
self.to_wire(f, None, origin)
return f.getvalue()
def validate(self):
"""Check that the current contents of the rdata's fields are
valid.
If you change an rdata by assigning to its fields,
it is a good idea to call validate() when you are done making
changes.
Raises various exceptions if there are problems.
Returns ``None``.
"""
dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
def __repr__(self):
covers = self.covers()
if covers == dns.rdatatype.NONE:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(covers) + ')'
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
str(self) + '>'
def __str__(self):
return self.to_text()
def _cmp(self, other):
"""Compare an rdata with another rdata of the same rdtype and
rdclass.
Return < 0 if self < other in the DNSSEC ordering, 0 if self
== other, and > 0 if self > other.
"""
our = self.to_digestable(dns.name.root)
their = other.to_digestable(dns.name.root)
if our == their:
return 0
elif our > their:
return 1
else:
return -1
def __eq__(self, other):
if not isinstance(other, Rdata):
return False
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return False
return self._cmp(other) == 0
def __ne__(self, other):
if not isinstance(other, Rdata):
return True
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return True
return self._cmp(other) != 0
def __lt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) < 0
def __le__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) <= 0
def __ge__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) >= 0
def __gt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) > 0
def __hash__(self):
return hash(self.to_digestable(dns.name.root))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
raise NotImplementedError
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
raise NotImplementedError
def choose_relativity(self, origin=None, relativize=True):
"""Convert any domain names in the rdata to the specified
relativization.
"""
class GenericRdata(Rdata):
"""Generic Rdata Class
This class is used for rdata types for which we have no better
implementation. It implements the DNS "unknown RRs" scheme.
"""
__slots__ = ['data']
def __init__(self, rdclass, rdtype, data):
super(GenericRdata, self).__init__(rdclass, rdtype)
self.data = data
def to_text(self, origin=None, relativize=True, **kw):
return r'\# %d ' % len(self.data) + _hexify(self.data)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
token = tok.get()
if not token.is_identifier() or token.value != r'\#':
raise dns.exception.SyntaxError(
r'generic rdata does not start with \#')
length = tok.get_int()
chunks = []
while 1:
token = tok.get()
if token.is_eol_or_eof():
break
chunks.append(token.value.encode())
hex = b''.join(chunks)
data = binascii.unhexlify(hex)
if len(data) != length:
raise dns.exception.SyntaxError(
'generic rdata hex data has wrong length')
return cls(rdclass, rdtype, data)
def to_wire(self, file, compress=None, origin=None):
file.write(self.data)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
return cls(rdclass, rdtype, wire[current: current + rdlen])
_rdata_modules = {}
_module_prefix = 'dns.rdtypes'
_import_lock = _threading.Lock()
def get_rdata_class(rdclass, rdtype):
def import_module(name):
with _import_lock:
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
mod = _rdata_modules.get((rdclass, rdtype))
rdclass_text = dns.rdataclass.to_text(rdclass)
rdtype_text = dns.rdatatype.to_text(rdtype)
rdtype_text = rdtype_text.replace('-', '_')
if not mod:
mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype))
if not mod:
try:
mod = import_module('.'.join([_module_prefix,
rdclass_text, rdtype_text]))
_rdata_modules[(rdclass, rdtype)] = mod
except ImportError:
try:
mod = import_module('.'.join([_module_prefix,
'ANY', rdtype_text]))
_rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod
except ImportError:
mod = None
if mod:
cls = getattr(mod, rdtype_text)
else:
cls = GenericRdata
return cls
def from_text(rdclass, rdtype, tok, origin=None, relativize=True):
"""Build an rdata object from text format.
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_text() class method is called
with the parameters to this function.
If *tok* is a ``text``, then a tokenizer is created and the string
is used as its input.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*tok*, a ``dns.tokenizer.Tokenizer`` or a ``text``.
*origin*, a ``dns.name.Name`` (or ``None``), the
origin to use for relative names.
*relativize*, a ``bool``. If true, name will be relativized to
the specified origin.
Returns an instance of the chosen Rdata subclass.
"""
if isinstance(tok, string_types):
tok = dns.tokenizer.Tokenizer(tok)
cls = get_rdata_class(rdclass, rdtype)
if cls != GenericRdata:
# peek at first token
token = tok.get()
tok.unget(token)
if token.is_identifier() and \
token.value == r'\#':
#
# Known type using the generic syntax. Extract the
# wire form from the generic syntax, and then run
# from_wire on it.
#
rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin,
relativize)
return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data),
origin)
return cls.from_text(rdclass, rdtype, tok, origin, relativize)
def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None):
"""Build an rdata object from wire format
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_wire() class method is called
with the parameters to this function.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*wire*, a ``binary``, the wire-format message.
*current*, an ``int``, the offset in wire of the beginning of
the rdata.
*rdlen*, an ``int``, the length of the wire-format rdata
*origin*, a ``dns.name.Name`` (or ``None``). If not ``None``,
then names will be relativized to this origin.
Returns an instance of the chosen Rdata subclass.
"""
wire = dns.wiredata.maybe_wrap(wire)
cls = get_rdata_class(rdclass, rdtype)
return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin)
class RdatatypeExists(dns.exception.DNSException):
"""DNS rdatatype already exists."""
supp_kwargs = {'rdclass', 'rdtype'}
fmt = "The rdata type with class {rdclass} and rdtype {rdtype} " + \
"already exists."
def register_type(implementation, rdtype, rdtype_text, is_singleton=False,
rdclass=dns.rdataclass.IN):
"""Dynamically register a module to handle an rdatatype.
*implementation*, a module implementing the type in the usual dnspython
way.
*rdtype*, an ``int``, the rdatatype to register.
*rdtype_text*, a ``text``, the textual form of the rdatatype.
*is_singleton*, a ``bool``, indicating if the type is a singleton (i.e.
RRsets of the type can have only one member.)
*rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if
it applies to all classes.
"""
existing_cls = get_rdata_class(rdclass, rdtype)
if existing_cls != GenericRdata:
raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype)
_rdata_modules[(rdclass, rdtype)] = implementation
dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton)
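A brief sketch of the dynamic-dispatch entry point ``dns.rdata.from_text()`` defined above; the addresses and names are documentation placeholders:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

a_rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.1')
mx_rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.MX, '10 mail.example.com.')
print(a_rd.to_text())                      # '192.0.2.1'
print(mx_rd.preference, mx_rd.exchange)    # 10 mail.example.com.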

View file

@@ -0,0 +1,122 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Rdata Classes."""
import re
import dns.exception
RESERVED0 = 0
IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255
_by_text = {
'RESERVED0': RESERVED0,
'IN': IN,
'CH': CH,
'HS': HS,
'NONE': NONE,
'ANY': ANY
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
would cause the mapping not to be a true inverse.
_by_value = {y: x for x, y in _by_text.items()}
# Now that we've built the inverse map, we can add class aliases to
# the _by_text mapping.
_by_text.update({
'INTERNET': IN,
'CHAOS': CH,
'HESIOD': HS
})
_metaclasses = {
NONE: True,
ANY: True
}
_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I)
class UnknownRdataclass(dns.exception.DNSException):
"""A DNS class is unknown."""
def from_text(text):
"""Convert text into a DNS rdata class value.
The input text can be a defined DNS RR class mnemonic or
instance of the DNS generic class syntax.
For example, "IN" and "CLASS1" will both result in a value of 1.
Raises ``dns.rdataclass.UnknownRdataclass`` if the class is unknown.
Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535.
Returns an ``int``.
"""
value = _by_text.get(text.upper())
if value is None:
match = _unknown_class_pattern.match(text)
if match is None:
raise UnknownRdataclass
value = int(match.group(1))
if value < 0 or value > 65535:
raise ValueError("class must be between >= 0 and <= 65535")
return value
def to_text(value):
"""Convert a DNS rdata type value to text.
If the value has a known mnemonic, it will be used, otherwise the
DNS generic class syntax will be used.
Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535.
Returns a ``str``.
"""
if value < 0 or value > 65535:
raise ValueError("class must be between >= 0 and <= 65535")
text = _by_value.get(value)
if text is None:
text = 'CLASS' + repr(value)
return text
def is_metaclass(rdclass):
"""True if the specified class is a metaclass.
The currently defined metaclasses are ANY and NONE.
*rdclass* is an ``int``.
"""
if rdclass in _metaclasses:
return True
return False
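A quick sketch of this module's text/value helpers:

import dns.rdataclass

assert dns.rdataclass.from_text('IN') == dns.rdataclass.IN
assert dns.rdataclass.from_text('CLASS1') == 1            # generic class syntax
print(dns.rdataclass.to_text(3))                           # 'CH'
print(dns.rdataclass.is_metaclass(dns.rdataclass.ANY))     # True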

View file

@@ -0,0 +1,347 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)"""
import random
from io import StringIO
import struct
import dns.exception
import dns.rdatatype
import dns.rdataclass
import dns.rdata
import dns.set
from ._compat import string_types
# define SimpleSet here for backwards compatibility
SimpleSet = dns.set.Set
class DifferingCovers(dns.exception.DNSException):
"""An attempt was made to add a DNS SIG/RRSIG whose covered type
is not the same as that of the other rdatas in the rdataset."""
class IncompatibleTypes(dns.exception.DNSException):
"""An attempt was made to add DNS RR data of an incompatible type."""
class Rdataset(dns.set.Set):
"""A DNS rdataset."""
__slots__ = ['rdclass', 'rdtype', 'covers', 'ttl']
def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE, ttl=0):
"""Create a new rdataset of the specified class and type.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*covers*, an ``int``, the covered rdatatype.
*ttl*, an ``int``, the TTL.
"""
super(Rdataset, self).__init__()
self.rdclass = rdclass
self.rdtype = rdtype
self.covers = covers
self.ttl = ttl
def _clone(self):
obj = super(Rdataset, self)._clone()
obj.rdclass = self.rdclass
obj.rdtype = self.rdtype
obj.covers = self.covers
obj.ttl = self.ttl
return obj
def update_ttl(self, ttl):
"""Perform TTL minimization.
Set the TTL of the rdataset to be the lesser of the set's current
TTL or the specified TTL. If the set contains no rdatas, set the TTL
to the specified TTL.
*ttl*, an ``int``.
"""
if len(self) == 0:
self.ttl = ttl
elif ttl < self.ttl:
self.ttl = ttl
def add(self, rd, ttl=None):
"""Add the specified rdata to the rdataset.
If the optional *ttl* parameter is supplied, then
``self.update_ttl(ttl)`` will be called prior to adding the rdata.
*rd*, a ``dns.rdata.Rdata``, the rdata
*ttl*, an ``int``, the TTL.
Raises ``dns.rdataset.IncompatibleTypes`` if the type and class
do not match the type and class of the rdataset.
Raises ``dns.rdataset.DifferingCovers`` if the type is a signature
type and the covered type does not match that of the rdataset.
"""
#
# If we're adding a signature, do some special handling to
# check that the signature covers the same type as the
# other rdatas in this rdataset. If this is the first rdata
# in the set, initialize the covers field.
#
if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
raise IncompatibleTypes
if ttl is not None:
self.update_ttl(ttl)
if self.rdtype == dns.rdatatype.RRSIG or \
self.rdtype == dns.rdatatype.SIG:
covers = rd.covers()
if len(self) == 0 and self.covers == dns.rdatatype.NONE:
self.covers = covers
elif self.covers != covers:
raise DifferingCovers
if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
self.clear()
super(Rdataset, self).add(rd)
def union_update(self, other):
self.update_ttl(other.ttl)
super(Rdataset, self).union_update(other)
def intersection_update(self, other):
self.update_ttl(other.ttl)
super(Rdataset, self).intersection_update(other)
def update(self, other):
"""Add all rdatas in other to self.
*other*, a ``dns.rdataset.Rdataset``, the rdataset from which
to update.
"""
self.update_ttl(other.ttl)
super(Rdataset, self).update(other)
def __repr__(self):
if self.covers == 0:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset>'
def __str__(self):
return self.to_text()
def __eq__(self, other):
if not isinstance(other, Rdataset):
return False
if self.rdclass != other.rdclass or \
self.rdtype != other.rdtype or \
self.covers != other.covers:
return False
return super(Rdataset, self).__eq__(other)
def __ne__(self, other):
return not self.__eq__(other)
def to_text(self, name=None, origin=None, relativize=True,
override_rdclass=None, **kw):
"""Convert the rdataset into DNS master file format.
See ``dns.name.Name.choose_relativity`` for more information
on how *origin* and *relativize* determine the way names
are emitted.
Any additional keyword arguments are passed on to the rdata
``to_text()`` method.
*name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with
*name* as the owner name.
*origin*, a ``dns.name.Name`` or ``None``, the origin for relative
names.
*relativize*, a ``bool``. If ``True``, names will be relativized
to *origin*.
"""
if name is not None:
name = name.choose_relativity(origin, relativize)
ntext = str(name)
pad = ' '
else:
ntext = ''
pad = ''
s = StringIO()
if override_rdclass is not None:
rdclass = override_rdclass
else:
rdclass = self.rdclass
if len(self) == 0:
#
# Empty rdatasets are used for the question section, and in
# some dynamic updates, so we don't need to print out the TTL
# (which is meaningless anyway).
#
s.write(u'{}{}{} {}\n'.format(ntext, pad,
dns.rdataclass.to_text(rdclass),
dns.rdatatype.to_text(self.rdtype)))
else:
for rd in self:
s.write(u'%s%s%d %s %s %s\n' %
(ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass),
dns.rdatatype.to_text(self.rdtype),
rd.to_text(origin=origin, relativize=relativize,
**kw)))
#
# We strip off the final \n for the caller's convenience in printing
#
return s.getvalue()[:-1]
def to_wire(self, name, file, compress=None, origin=None,
override_rdclass=None, want_shuffle=True):
"""Convert the rdataset to wire format.
*name*, a ``dns.name.Name`` is the owner name to use.
*file* is the file where the name is emitted (typically a
BytesIO file).
*compress*, a ``dict``, is the compression table to use. If
``None`` (the default), names will not be compressed.
*origin* is a ``dns.name.Name`` or ``None``. If the name is
relative and origin is not ``None``, then *origin* will be appended
to it.
*override_rdclass*, an ``int``, is used as the class instead of the
class of the rdataset. This is useful when rendering rdatasets
associated with dynamic updates.
*want_shuffle*, a ``bool``. If ``True``, then the order of the
Rdatas within the Rdataset will be shuffled before rendering.
Returns an ``int``, the number of records emitted.
"""
if override_rdclass is not None:
rdclass = override_rdclass
want_shuffle = False
else:
rdclass = self.rdclass
file.seek(0, 2)
if len(self) == 0:
name.to_wire(file, compress, origin)
stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
file.write(stuff)
return 1
else:
if want_shuffle:
l = list(self)
random.shuffle(l)
else:
l = self
for rd in l:
name.to_wire(file, compress, origin)
stuff = struct.pack("!HHIH", self.rdtype, rdclass,
self.ttl, 0)
file.write(stuff)
start = file.tell()
rd.to_wire(file, compress, origin)
end = file.tell()
assert end - start < 65536
file.seek(start - 2)
stuff = struct.pack("!H", end - start)
file.write(stuff)
file.seek(0, 2)
return len(self)
def match(self, rdclass, rdtype, covers):
"""Returns ``True`` if this rdataset matches the specified class,
type, and covers.
"""
if self.rdclass == rdclass and \
self.rdtype == rdtype and \
self.covers == covers:
return True
return False
def from_text_list(rdclass, rdtype, ttl, text_rdatas):
"""Create an rdataset with the specified class, type, and TTL, and with
the specified list of rdatas in text format.
Returns a ``dns.rdataset.Rdataset`` object.
"""
if isinstance(rdclass, string_types):
rdclass = dns.rdataclass.from_text(rdclass)
if isinstance(rdtype, string_types):
rdtype = dns.rdatatype.from_text(rdtype)
r = Rdataset(rdclass, rdtype)
r.update_ttl(ttl)
for t in text_rdatas:
rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
r.add(rd)
return r
def from_text(rdclass, rdtype, ttl, *text_rdatas):
"""Create an rdataset with the specified class, type, and TTL, and with
the specified rdatas in text format.
Returns a ``dns.rdataset.Rdataset`` object.
"""
return from_text_list(rdclass, rdtype, ttl, text_rdatas)
def from_rdata_list(ttl, rdatas):
"""Create an rdataset with the specified TTL, and with
the specified list of rdata objects.
Returns a ``dns.rdataset.Rdataset`` object.
"""
if len(rdatas) == 0:
raise ValueError("rdata list must not be empty")
r = None
for rd in rdatas:
if r is None:
r = Rdataset(rd.rdclass, rd.rdtype)
r.update_ttl(ttl)
r.add(rd)
return r
def from_rdata(ttl, *rdatas):
"""Create an rdataset with the specified TTL, and with
the specified rdata objects.
Returns a ``dns.rdataset.Rdataset`` object.
"""
return from_rdata_list(ttl, rdatas)
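A minimal sketch of building an rdataset with the ``from_text()`` helper above; the nameserver names are placeholders:

import dns.rdataset

rrs = dns.rdataset.from_text('IN', 'NS', 3600,
                             'ns1.example.com.', 'ns2.example.com.')
print(rrs.ttl)          # 3600
print(rrs.to_text())    # one '3600 IN NS ...' line per rdata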

View file

@@ -0,0 +1,287 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Rdata Types."""
import re
import dns.exception
NONE = 0
A = 1
NS = 2
MD = 3
MF = 4
CNAME = 5
SOA = 6
MB = 7
MG = 8
MR = 9
NULL = 10
WKS = 11
PTR = 12
HINFO = 13
MINFO = 14
MX = 15
TXT = 16
RP = 17
AFSDB = 18
X25 = 19
ISDN = 20
RT = 21
NSAP = 22
NSAP_PTR = 23
SIG = 24
KEY = 25
PX = 26
GPOS = 27
AAAA = 28
LOC = 29
NXT = 30
SRV = 33
NAPTR = 35
KX = 36
CERT = 37
A6 = 38
DNAME = 39
OPT = 41
APL = 42
DS = 43
SSHFP = 44
IPSECKEY = 45
RRSIG = 46
NSEC = 47
DNSKEY = 48
DHCID = 49
NSEC3 = 50
NSEC3PARAM = 51
TLSA = 52
HIP = 55
CDS = 59
CDNSKEY = 60
OPENPGPKEY = 61
CSYNC = 62
SPF = 99
UNSPEC = 103
EUI48 = 108
EUI64 = 109
TKEY = 249
TSIG = 250
IXFR = 251
AXFR = 252
MAILB = 253
MAILA = 254
ANY = 255
URI = 256
CAA = 257
AVC = 258
TA = 32768
DLV = 32769
_by_text = {
'NONE': NONE,
'A': A,
'NS': NS,
'MD': MD,
'MF': MF,
'CNAME': CNAME,
'SOA': SOA,
'MB': MB,
'MG': MG,
'MR': MR,
'NULL': NULL,
'WKS': WKS,
'PTR': PTR,
'HINFO': HINFO,
'MINFO': MINFO,
'MX': MX,
'TXT': TXT,
'RP': RP,
'AFSDB': AFSDB,
'X25': X25,
'ISDN': ISDN,
'RT': RT,
'NSAP': NSAP,
'NSAP-PTR': NSAP_PTR,
'SIG': SIG,
'KEY': KEY,
'PX': PX,
'GPOS': GPOS,
'AAAA': AAAA,
'LOC': LOC,
'NXT': NXT,
'SRV': SRV,
'NAPTR': NAPTR,
'KX': KX,
'CERT': CERT,
'A6': A6,
'DNAME': DNAME,
'OPT': OPT,
'APL': APL,
'DS': DS,
'SSHFP': SSHFP,
'IPSECKEY': IPSECKEY,
'RRSIG': RRSIG,
'NSEC': NSEC,
'DNSKEY': DNSKEY,
'DHCID': DHCID,
'NSEC3': NSEC3,
'NSEC3PARAM': NSEC3PARAM,
'TLSA': TLSA,
'HIP': HIP,
'CDS': CDS,
'CDNSKEY': CDNSKEY,
'OPENPGPKEY': OPENPGPKEY,
'CSYNC': CSYNC,
'SPF': SPF,
'UNSPEC': UNSPEC,
'EUI48': EUI48,
'EUI64': EUI64,
'TKEY': TKEY,
'TSIG': TSIG,
'IXFR': IXFR,
'AXFR': AXFR,
'MAILB': MAILB,
'MAILA': MAILA,
'ANY': ANY,
'URI': URI,
'CAA': CAA,
'AVC': AVC,
'TA': TA,
'DLV': DLV,
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be a true inverse.
_by_value = {y: x for x, y in _by_text.items()}
_metatypes = {
OPT: True
}
_singletons = {
SOA: True,
NXT: True,
DNAME: True,
NSEC: True,
CNAME: True,
}
_unknown_type_pattern = re.compile('TYPE([0-9]+)$', re.I)
class UnknownRdatatype(dns.exception.DNSException):
"""DNS resource record type is unknown."""
def from_text(text):
"""Convert text into a DNS rdata type value.
The input text can be a defined DNS RR type mnemonic or
instance of the DNS generic type syntax.
For example, "NS" and "TYPE2" will both result in a value of 2.
Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown.
Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535.
Returns an ``int``.
"""
value = _by_text.get(text.upper())
if value is None:
match = _unknown_type_pattern.match(text)
if match is None:
raise UnknownRdatatype
value = int(match.group(1))
if value < 0 or value > 65535:
raise ValueError("type must be between >= 0 and <= 65535")
return value
def to_text(value):
"""Convert a DNS rdata type value to text.
If the value has a known mnemonic, it will be used, otherwise the
DNS generic type syntax will be used.
Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535.
Returns a ``str``.
"""
if value < 0 or value > 65535:
raise ValueError("type must be between >= 0 and <= 65535")
text = _by_value.get(value)
if text is None:
text = 'TYPE' + repr(value)
return text
def is_metatype(rdtype):
"""True if the specified type is a metatype.
*rdtype* is an ``int``.
The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA,
MAILB, ANY, and OPT.
Returns a ``bool``.
"""
if rdtype >= TKEY and rdtype <= ANY or rdtype in _metatypes:
return True
return False
def is_singleton(rdtype):
"""Is the specified type a singleton type?
Singleton types can only have a single rdata in an rdataset, or a single
RR in an RRset.
The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and
SOA.
*rdtype* is an ``int``.
Returns a ``bool``.
"""
if rdtype in _singletons:
return True
return False
def register_type(rdtype, rdtype_text, is_singleton=False): # pylint: disable=redefined-outer-name
"""Dynamically register an rdatatype.
*rdtype*, an ``int``, the rdatatype to register.
*rdtype_text*, a ``text``, the textual form of the rdatatype.
*is_singleton*, a ``bool``, indicating if the type is a singleton (i.e.
RRsets of the type can have only one member.)
"""
_by_text[rdtype_text] = rdtype
_by_value[rdtype] = rdtype_text
if is_singleton:
_singletons[rdtype] = True
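A short sketch of the rdatatype helpers above:

import dns.rdatatype

assert dns.rdatatype.from_text('AAAA') == dns.rdatatype.AAAA
assert dns.rdatatype.from_text('TYPE28') == 28             # generic type syntax
print(dns.rdatatype.to_text(252))                           # 'AXFR'
print(dns.rdatatype.is_singleton(dns.rdatatype.CNAME))      # True
print(dns.rdatatype.is_metatype(dns.rdatatype.OPT))         # True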

View file

@@ -0,0 +1,55 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.mxbase
class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX):
"""AFSDB record
@ivar subtype: the subtype value
@type subtype: int
@ivar hostname: the hostname
@type hostname: dns.name.Name object"""
# Use the property mechanism to make "subtype" an alias for the
# "preference" attribute, and "hostname" an alias for the "exchange"
# attribute.
#
# This lets us inherit the UncompressedMX implementation but lets
# the caller use appropriate attribute names for the rdata type.
#
# We probably lose some performance vs. a cut-and-paste
# implementation, but this way we don't copy code, and that's
# good.
def get_subtype(self):
return self.preference
def set_subtype(self, subtype):
self.preference = subtype
subtype = property(get_subtype, set_subtype)
def get_hostname(self):
return self.exchange
def set_hostname(self, hostname):
self.exchange = hostname
hostname = property(get_hostname, set_hostname)
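A sketch showing the ``subtype``/``hostname`` aliases defined above; the hostname is a placeholder:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.AFSDB, '1 afsdb.example.com.')
print(rd.subtype)    # 1, an alias for the inherited 'preference' field
print(rd.hostname)   # afsdb.example.com., an alias for 'exchange'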

View file

@@ -0,0 +1,25 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2016 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.txtbase
class AVC(dns.rdtypes.txtbase.TXTBase):
"""AVC record
@see: U{http://www.iana.org/assignments/dns-parameters/AVC/avc-completed-template}"""

View file

@@ -0,0 +1,75 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.tokenizer
class CAA(dns.rdata.Rdata):
"""CAA (Certification Authority Authorization) record
@ivar flags: the flags
@type flags: int
@ivar tag: the tag
@type tag: string
@ivar value: the value
@type value: string
@see: RFC 6844"""
__slots__ = ['flags', 'tag', 'value']
def __init__(self, rdclass, rdtype, flags, tag, value):
super(CAA, self).__init__(rdclass, rdtype)
self.flags = flags
self.tag = tag
self.value = value
def to_text(self, origin=None, relativize=True, **kw):
return '%u %s "%s"' % (self.flags,
dns.rdata._escapify(self.tag),
dns.rdata._escapify(self.value))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
flags = tok.get_uint8()
tag = tok.get_string().encode()
if len(tag) > 255:
raise dns.exception.SyntaxError("tag too long")
if not tag.isalnum():
raise dns.exception.SyntaxError("tag is not alphanumeric")
value = tok.get_string().encode()
return cls(rdclass, rdtype, flags, tag, value)
def to_wire(self, file, compress=None, origin=None):
file.write(struct.pack('!B', self.flags))
l = len(self.tag)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.tag)
file.write(self.value)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(flags, l) = struct.unpack('!BB', wire[current: current + 2])
current += 2
tag = wire[current: current + l]
value = wire[current + l:current + rdlen - 2]
return cls(rdclass, rdtype, flags, tag, value)
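A short sketch of round-tripping a CAA record through the text form; the CA domain is a placeholder:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CAA, '0 issue "ca.example.net"')
print(rd.flags)      # 0
print(rd.tag)        # b'issue'
print(rd.to_text())  # 0 issue "ca.example.net"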

View file

@@ -0,0 +1,27 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.dnskeybase
from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set
__all__ = ['flags_to_text_set', 'flags_from_text_set']
class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase):
"""CDNSKEY record"""

View file

@@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.dsbase
class CDS(dns.rdtypes.dsbase.DSBase):
"""CDS record"""

View file

@@ -0,0 +1,123 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import base64
import dns.exception
import dns.dnssec
import dns.rdata
import dns.tokenizer
_ctype_by_value = {
1: 'PKIX',
2: 'SPKI',
3: 'PGP',
253: 'URI',
254: 'OID',
}
_ctype_by_name = {
'PKIX': 1,
'SPKI': 2,
'PGP': 3,
'URI': 253,
'OID': 254,
}
def _ctype_from_text(what):
v = _ctype_by_name.get(what)
if v is not None:
return v
return int(what)
def _ctype_to_text(what):
v = _ctype_by_value.get(what)
if v is not None:
return v
return str(what)
class CERT(dns.rdata.Rdata):
"""CERT record
@ivar certificate_type: certificate type
@type certificate_type: int
@ivar key_tag: key tag
@type key_tag: int
@ivar algorithm: algorithm
@type algorithm: int
@ivar certificate: the certificate or CRL
@type certificate: string
@see: RFC 2538"""
__slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate']
def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm,
certificate):
super(CERT, self).__init__(rdclass, rdtype)
self.certificate_type = certificate_type
self.key_tag = key_tag
self.algorithm = algorithm
self.certificate = certificate
def to_text(self, origin=None, relativize=True, **kw):
certificate_type = _ctype_to_text(self.certificate_type)
return "%s %d %s %s" % (certificate_type, self.key_tag,
dns.dnssec.algorithm_to_text(self.algorithm),
dns.rdata._base64ify(self.certificate))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
certificate_type = _ctype_from_text(tok.get_string())
key_tag = tok.get_uint16()
algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
if algorithm < 0 or algorithm > 255:
raise dns.exception.SyntaxError("bad algorithm type")
chunks = []
while 1:
t = tok.get().unescape()
if t.is_eol_or_eof():
break
if not t.is_identifier():
raise dns.exception.SyntaxError
chunks.append(t.value.encode())
b64 = b''.join(chunks)
certificate = base64.b64decode(b64)
return cls(rdclass, rdtype, certificate_type, key_tag,
algorithm, certificate)
def to_wire(self, file, compress=None, origin=None):
prefix = struct.pack("!HHB", self.certificate_type, self.key_tag,
self.algorithm)
file.write(prefix)
file.write(self.certificate)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
prefix = wire[current: current + 5].unwrap()
current += 5
rdlen -= 5
if rdlen < 0:
raise dns.exception.FormError
(certificate_type, key_tag, algorithm) = struct.unpack("!HHB", prefix)
certificate = wire[current: current + rdlen].unwrap()
return cls(rdclass, rdtype, certificate_type, key_tag, algorithm,
certificate)
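A sketch of parsing a CERT record from text; the base64 blob is a placeholder, not a real certificate:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CERT,
                         'PKIX 12345 RSASHA256 MTIzNDU2Nzg5')
print(rd.certificate_type)   # 1 (PKIX)
print(rd.key_tag)            # 12345
print(rd.algorithm)          # 8 (RSASHA256)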

View file

@@ -0,0 +1,27 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.nsbase
class CNAME(dns.rdtypes.nsbase.NSBase):
"""CNAME record
Note: although CNAME is officially a singleton type, dnspython allows
non-singleton CNAME rdatasets because such sets have been commonly
used by BIND and other nameservers for load balancing."""

View file

@@ -0,0 +1,126 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.rdatatype
import dns.name
from dns._compat import xrange
class CSYNC(dns.rdata.Rdata):
"""CSYNC record
@ivar serial: the SOA serial number
@type serial: int
@ivar flags: the CSYNC flags
@type flags: int
@ivar windows: the windowed bitmap list
@type windows: list of (window number, string) tuples"""
__slots__ = ['serial', 'flags', 'windows']
def __init__(self, rdclass, rdtype, serial, flags, windows):
super(CSYNC, self).__init__(rdclass, rdtype)
self.serial = serial
self.flags = flags
self.windows = windows
def to_text(self, origin=None, relativize=True, **kw):
text = ''
for (window, bitmap) in self.windows:
bits = []
for i in xrange(0, len(bitmap)):
byte = bitmap[i]
for j in xrange(0, 8):
if byte & (0x80 >> j):
bits.append(dns.rdatatype.to_text(window * 256 +
i * 8 + j))
text += (' ' + ' '.join(bits))
return '%d %d%s' % (self.serial, self.flags, text)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
serial = tok.get_uint32()
flags = tok.get_uint16()
rdtypes = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
nrdtype = dns.rdatatype.from_text(token.value)
if nrdtype == 0:
raise dns.exception.SyntaxError("CSYNC with bit 0")
if nrdtype > 65535:
raise dns.exception.SyntaxError("CSYNC with bit > 65535")
rdtypes.append(nrdtype)
rdtypes.sort()
window = 0
octets = 0
prior_rdtype = 0
bitmap = bytearray(b'\0' * 32)
windows = []
for nrdtype in rdtypes:
if nrdtype == prior_rdtype:
continue
prior_rdtype = nrdtype
new_window = nrdtype // 256
if new_window != window:
windows.append((window, bitmap[0:octets]))
bitmap = bytearray(b'\0' * 32)
window = new_window
offset = nrdtype % 256
byte = offset // 8
bit = offset % 8
octets = byte + 1
bitmap[byte] = bitmap[byte] | (0x80 >> bit)
windows.append((window, bitmap[0:octets]))
return cls(rdclass, rdtype, serial, flags, windows)
def to_wire(self, file, compress=None, origin=None):
file.write(struct.pack('!IH', self.serial, self.flags))
for (window, bitmap) in self.windows:
file.write(struct.pack('!BB', window, len(bitmap)))
file.write(bitmap)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
if rdlen < 6:
raise dns.exception.FormError("CSYNC too short")
(serial, flags) = struct.unpack("!IH", wire[current: current + 6])
current += 6
rdlen -= 6
windows = []
while rdlen > 0:
if rdlen < 3:
raise dns.exception.FormError("CSYNC too short")
window = wire[current]
octets = wire[current + 1]
if octets == 0 or octets > 32:
raise dns.exception.FormError("bad CSYNC octets")
current += 2
rdlen -= 2
if rdlen < octets:
raise dns.exception.FormError("bad CSYNC bitmap length")
bitmap = bytearray(wire[current: current + octets].unwrap())
current += octets
rdlen -= octets
windows.append((window, bitmap))
return cls(rdclass, rdtype, serial, flags, windows)
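A sketch of the CSYNC text form (the same shape as the RFC 7477 example): SOA serial, flags, then the covered types as mnemonics:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CSYNC, '66 3 A NS AAAA')
print(rd.serial)     # 66
print(rd.flags)      # 3
print(rd.to_text())  # '66 3 A NS AAAA'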

View file

@@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.dsbase
class DLV(dns.rdtypes.dsbase.DSBase):
"""DLV record"""

View file

@@ -0,0 +1,26 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.nsbase
class DNAME(dns.rdtypes.nsbase.UncompressedNS):
"""DNAME record"""
def to_digestable(self, origin=None):
return self.target.to_digestable(origin)

View file

@@ -0,0 +1,27 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.dnskeybase
from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set
__all__ = ['flags_to_text_set', 'flags_from_text_set']
class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase):
"""DNSKEY record"""

View file

@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.dsbase
class DS(dns.rdtypes.dsbase.DSBase):
"""DS record"""

View file

@ -0,0 +1,29 @@
# Copyright (C) 2015 Red Hat, Inc.
# Author: Petr Spacek <pspacek@redhat.com>
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.euibase
class EUI48(dns.rdtypes.euibase.EUIBase):
"""EUI48 record
@ivar fingerprint: 48-bit Extended Unique Identifier (EUI-48)
@type fingerprint: string
@see: rfc7043.txt"""
byte_len = 6 # 0123456789ab (in hex)
text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab

View file

@ -0,0 +1,29 @@
# Copyright (C) 2015 Red Hat, Inc.
# Author: Petr Spacek <pspacek@redhat.com>
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.euibase
class EUI64(dns.rdtypes.euibase.EUIBase):
"""EUI64 record
@ivar fingerprint: 64-bit Extended Unique Identifier (EUI-64)
@type fingerprint: string
@see: rfc7043.txt"""
byte_len = 8 # 0123456789abcdef (in hex)
text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef

View file

@ -0,0 +1,162 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.tokenizer
from dns._compat import long, text_type
def _validate_float_string(what):
if what[0] == b'-'[0] or what[0] == b'+'[0]:
what = what[1:]
if what.isdigit():
return
(left, right) = what.split(b'.')
if left == b'' and right == b'':
raise dns.exception.FormError
if not left == b'' and not left.decode().isdigit():
raise dns.exception.FormError
if not right == b'' and not right.decode().isdigit():
raise dns.exception.FormError
def _sanitize(value):
if isinstance(value, text_type):
return value.encode()
return value
class GPOS(dns.rdata.Rdata):
"""GPOS record
@ivar latitude: latitude
@type latitude: string
@ivar longitude: longitude
@type longitude: string
@ivar altitude: altitude
@type altitude: string
@see: RFC 1712"""
__slots__ = ['latitude', 'longitude', 'altitude']
def __init__(self, rdclass, rdtype, latitude, longitude, altitude):
super(GPOS, self).__init__(rdclass, rdtype)
if isinstance(latitude, float) or \
isinstance(latitude, int) or \
isinstance(latitude, long):
latitude = str(latitude)
if isinstance(longitude, float) or \
isinstance(longitude, int) or \
isinstance(longitude, long):
longitude = str(longitude)
if isinstance(altitude, float) or \
isinstance(altitude, int) or \
isinstance(altitude, long):
altitude = str(altitude)
latitude = _sanitize(latitude)
longitude = _sanitize(longitude)
altitude = _sanitize(altitude)
_validate_float_string(latitude)
_validate_float_string(longitude)
_validate_float_string(altitude)
self.latitude = latitude
self.longitude = longitude
self.altitude = altitude
def to_text(self, origin=None, relativize=True, **kw):
return '{} {} {}'.format(self.latitude.decode(),
self.longitude.decode(),
self.altitude.decode())
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
latitude = tok.get_string()
longitude = tok.get_string()
altitude = tok.get_string()
tok.get_eol()
return cls(rdclass, rdtype, latitude, longitude, altitude)
def to_wire(self, file, compress=None, origin=None):
l = len(self.latitude)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.latitude)
l = len(self.longitude)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.longitude)
l = len(self.altitude)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.altitude)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
l = wire[current]
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
latitude = wire[current: current + l].unwrap()
current += l
rdlen -= l
l = wire[current]
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
longitude = wire[current: current + l].unwrap()
current += l
rdlen -= l
l = wire[current]
current += 1
rdlen -= 1
if l != rdlen:
raise dns.exception.FormError
altitude = wire[current: current + l].unwrap()
return cls(rdclass, rdtype, latitude, longitude, altitude)
def _get_float_latitude(self):
return float(self.latitude)
def _set_float_latitude(self, value):
self.latitude = str(value)
float_latitude = property(_get_float_latitude, _set_float_latitude,
doc="latitude as a floating point value")
def _get_float_longitude(self):
return float(self.longitude)
def _set_float_longitude(self, value):
self.longitude = str(value)
float_longitude = property(_get_float_longitude, _set_float_longitude,
doc="longitude as a floating point value")
def _get_float_altitude(self):
return float(self.altitude)
def _set_float_altitude(self, value):
self.altitude = str(value)
float_altitude = property(_get_float_altitude, _set_float_altitude,
doc="altitude as a floating point value")

View file

@ -0,0 +1,86 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.tokenizer
from dns._compat import text_type
class HINFO(dns.rdata.Rdata):
"""HINFO record
@ivar cpu: the CPU type
@type cpu: string
@ivar os: the OS type
@type os: string
@see: RFC 1035"""
__slots__ = ['cpu', 'os']
def __init__(self, rdclass, rdtype, cpu, os):
super(HINFO, self).__init__(rdclass, rdtype)
if isinstance(cpu, text_type):
self.cpu = cpu.encode()
else:
self.cpu = cpu
if isinstance(os, text_type):
self.os = os.encode()
else:
self.os = os
def to_text(self, origin=None, relativize=True, **kw):
return '"{}" "{}"'.format(dns.rdata._escapify(self.cpu),
dns.rdata._escapify(self.os))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
cpu = tok.get_string()
os = tok.get_string()
tok.get_eol()
return cls(rdclass, rdtype, cpu, os)
def to_wire(self, file, compress=None, origin=None):
l = len(self.cpu)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.cpu)
l = len(self.os)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.os)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
l = wire[current]
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
cpu = wire[current:current + l].unwrap()
current += l
rdlen -= l
l = wire[current]
current += 1
rdlen -= 1
if l != rdlen:
raise dns.exception.FormError
os = wire[current: current + l].unwrap()
return cls(rdclass, rdtype, cpu, os)
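
An illustrative sketch of the HINFO presentation round-trip (assumes the vendored dnspython is importable as `dns`; the CPU/OS strings are placeholders):
# The CPU and OS fields are stored as bytes and quoted in text form.
import dns.rdata
import dns.rdataclass
import dns.rdatatype
hinfo = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.HINFO,
                            '"INTEL-386" "Windows"')
print(hinfo.cpu)        # b'INTEL-386'
print(hinfo.to_text())  # '"INTEL-386" "Windows"'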

View file

@ -0,0 +1,115 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2010, 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import base64
import binascii
import dns.exception
import dns.rdata
import dns.rdatatype
class HIP(dns.rdata.Rdata):
"""HIP record
@ivar hit: the host identity tag
@type hit: string
@ivar algorithm: the public key cryptographic algorithm
@type algorithm: int
@ivar key: the public key
@type key: string
@ivar servers: the rendezvous servers
@type servers: list of dns.name.Name objects
@see: RFC 5205"""
__slots__ = ['hit', 'algorithm', 'key', 'servers']
def __init__(self, rdclass, rdtype, hit, algorithm, key, servers):
super(HIP, self).__init__(rdclass, rdtype)
self.hit = hit
self.algorithm = algorithm
self.key = key
self.servers = servers
def to_text(self, origin=None, relativize=True, **kw):
hit = binascii.hexlify(self.hit).decode()
key = base64.b64encode(self.key).replace(b'\n', b'').decode()
text = u''
servers = []
for server in self.servers:
servers.append(server.choose_relativity(origin, relativize))
if len(servers) > 0:
text += (u' ' + u' '.join((x.to_unicode() for x in servers)))
return u'%u %s %s%s' % (self.algorithm, hit, key, text)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
algorithm = tok.get_uint8()
hit = binascii.unhexlify(tok.get_string().encode())
if len(hit) > 255:
raise dns.exception.SyntaxError("HIT too long")
key = base64.b64decode(tok.get_string().encode())
servers = []
while 1:
token = tok.get()
if token.is_eol_or_eof():
break
server = dns.name.from_text(token.value, origin)
server.choose_relativity(origin, relativize)
servers.append(server)
return cls(rdclass, rdtype, hit, algorithm, key, servers)
def to_wire(self, file, compress=None, origin=None):
lh = len(self.hit)
lk = len(self.key)
file.write(struct.pack("!BBH", lh, self.algorithm, lk))
file.write(self.hit)
file.write(self.key)
for server in self.servers:
server.to_wire(file, None, origin)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(lh, algorithm, lk) = struct.unpack('!BBH',
wire[current: current + 4])
current += 4
rdlen -= 4
hit = wire[current: current + lh].unwrap()
current += lh
rdlen -= lh
key = wire[current: current + lk].unwrap()
current += lk
rdlen -= lk
servers = []
while rdlen > 0:
(server, cused) = dns.name.from_wire(wire[: current + rdlen],
current)
current += cused
rdlen -= cused
if origin is not None:
server = server.relativize(origin)
servers.append(server)
return cls(rdclass, rdtype, hit, algorithm, key, servers)
def choose_relativity(self, origin=None, relativize=True):
servers = []
for server in self.servers:
server = server.choose_relativity(origin, relativize)
servers.append(server)
self.servers = servers

View file

@ -0,0 +1,99 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.tokenizer
from dns._compat import text_type
class ISDN(dns.rdata.Rdata):
"""ISDN record
@ivar address: the ISDN address
@type address: string
@ivar subaddress: the ISDN subaddress (or '' if not present)
@type subaddress: string
@see: RFC 1183"""
__slots__ = ['address', 'subaddress']
def __init__(self, rdclass, rdtype, address, subaddress):
super(ISDN, self).__init__(rdclass, rdtype)
if isinstance(address, text_type):
self.address = address.encode()
else:
self.address = address
if isinstance(subaddress, text_type):
self.subaddress = subaddress.encode()
else:
self.subaddress = subaddress
def to_text(self, origin=None, relativize=True, **kw):
if self.subaddress:
return '"{}" "{}"'.format(dns.rdata._escapify(self.address),
dns.rdata._escapify(self.subaddress))
else:
return '"%s"' % dns.rdata._escapify(self.address)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
address = tok.get_string()
t = tok.get()
if not t.is_eol_or_eof():
tok.unget(t)
subaddress = tok.get_string()
else:
tok.unget(t)
subaddress = ''
tok.get_eol()
return cls(rdclass, rdtype, address, subaddress)
def to_wire(self, file, compress=None, origin=None):
l = len(self.address)
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.address)
l = len(self.subaddress)
if l > 0:
assert l < 256
file.write(struct.pack('!B', l))
file.write(self.subaddress)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
l = wire[current]
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
address = wire[current: current + l].unwrap()
current += l
rdlen -= l
if rdlen > 0:
l = wire[current]
current += 1
rdlen -= 1
if l != rdlen:
raise dns.exception.FormError
subaddress = wire[current: current + l].unwrap()
else:
subaddress = ''
return cls(rdclass, rdtype, address, subaddress)
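
A small sketch of the optional subaddress handling (illustrative, assuming the vendored dnspython is importable; the numbers are placeholders):
# With two tokens the second becomes the subaddress; with a single token
# the subaddress stays empty and is omitted from the text form.
import dns.rdata
import dns.rdataclass
import dns.rdatatype
isdn = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.ISDN,
                           '150862028003217 004')
print(isdn.address, isdn.subaddress)  # b'150862028003217' b'004'
print(isdn.to_text())                 # '"150862028003217" "004"'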

View file

@ -0,0 +1,327 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division
import struct
import dns.exception
import dns.rdata
from dns._compat import long, xrange, round_py2_compat
_pows = tuple(long(10**i) for i in range(0, 11))
# default values are in centimeters
_default_size = 100.0
_default_hprec = 1000000.0
_default_vprec = 1000.0
def _exponent_of(what, desc):
if what == 0:
return 0
exp = None
for i in xrange(len(_pows)):
if what // _pows[i] == long(0):
exp = i - 1
break
if exp is None or exp < 0:
raise dns.exception.SyntaxError("%s value out of bounds" % desc)
return exp
def _float_to_tuple(what):
if what < 0:
sign = -1
what *= -1
else:
sign = 1
what = round_py2_compat(what * 3600000)
degrees = int(what // 3600000)
what -= degrees * 3600000
minutes = int(what // 60000)
what -= minutes * 60000
seconds = int(what // 1000)
what -= int(seconds * 1000)
what = int(what)
return (degrees, minutes, seconds, what, sign)
def _tuple_to_float(what):
value = float(what[0])
value += float(what[1]) / 60.0
value += float(what[2]) / 3600.0
value += float(what[3]) / 3600000.0
return float(what[4]) * value
def _encode_size(what, desc):
what = long(what)
exponent = _exponent_of(what, desc) & 0xF
base = what // pow(10, exponent) & 0xF
return base * 16 + exponent
def _decode_size(what, desc):
exponent = what & 0x0F
if exponent > 9:
raise dns.exception.SyntaxError("bad %s exponent" % desc)
base = (what & 0xF0) >> 4
if base > 9:
raise dns.exception.SyntaxError("bad %s base" % desc)
return long(base) * pow(10, exponent)
class LOC(dns.rdata.Rdata):
"""LOC record
@ivar latitude: latitude
@type latitude: (int, int, int, int, sign) tuple specifying the degrees, minutes,
seconds, milliseconds, and sign of the coordinate.
@ivar longitude: longitude
@type longitude: (int, int, int, int, sign) tuple specifying the degrees,
minutes, seconds, milliseconds, and sign of the coordinate.
@ivar altitude: altitude
@type altitude: float
@ivar size: size of the sphere
@type size: float
@ivar horizontal_precision: horizontal precision
@type horizontal_precision: float
@ivar vertical_precision: vertical precision
@type vertical_precision: float
@see: RFC 1876"""
__slots__ = ['latitude', 'longitude', 'altitude', 'size',
'horizontal_precision', 'vertical_precision']
def __init__(self, rdclass, rdtype, latitude, longitude, altitude,
size=_default_size, hprec=_default_hprec,
vprec=_default_vprec):
"""Initialize a LOC record instance.
The parameters I{latitude} and I{longitude} may be either a 4-tuple
of integers specifying (degrees, minutes, seconds, milliseconds),
or they may be floating point values specifying the number of
degrees. The other parameters are floats. Size, horizontal precision,
and vertical precision are specified in centimeters."""
super(LOC, self).__init__(rdclass, rdtype)
if isinstance(latitude, int) or isinstance(latitude, long):
latitude = float(latitude)
if isinstance(latitude, float):
latitude = _float_to_tuple(latitude)
self.latitude = latitude
if isinstance(longitude, int) or isinstance(longitude, long):
longitude = float(longitude)
if isinstance(longitude, float):
longitude = _float_to_tuple(longitude)
self.longitude = longitude
self.altitude = float(altitude)
self.size = float(size)
self.horizontal_precision = float(hprec)
self.vertical_precision = float(vprec)
def to_text(self, origin=None, relativize=True, **kw):
if self.latitude[4] > 0:
lat_hemisphere = 'N'
else:
lat_hemisphere = 'S'
if self.longitude[4] > 0:
long_hemisphere = 'E'
else:
long_hemisphere = 'W'
text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % (
self.latitude[0], self.latitude[1],
self.latitude[2], self.latitude[3], lat_hemisphere,
self.longitude[0], self.longitude[1], self.longitude[2],
self.longitude[3], long_hemisphere,
self.altitude / 100.0
)
# do not print default values
if self.size != _default_size or \
self.horizontal_precision != _default_hprec or \
self.vertical_precision != _default_vprec:
text += " {:0.2f}m {:0.2f}m {:0.2f}m".format(
self.size / 100.0, self.horizontal_precision / 100.0,
self.vertical_precision / 100.0
)
return text
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
latitude = [0, 0, 0, 0, 1]
longitude = [0, 0, 0, 0, 1]
size = _default_size
hprec = _default_hprec
vprec = _default_vprec
latitude[0] = tok.get_int()
t = tok.get_string()
if t.isdigit():
latitude[1] = int(t)
t = tok.get_string()
if '.' in t:
(seconds, milliseconds) = t.split('.')
if not seconds.isdigit():
raise dns.exception.SyntaxError(
'bad latitude seconds value')
latitude[2] = int(seconds)
if latitude[2] >= 60:
raise dns.exception.SyntaxError('latitude seconds >= 60')
l = len(milliseconds)
if l == 0 or l > 3 or not milliseconds.isdigit():
raise dns.exception.SyntaxError(
'bad latitude milliseconds value')
if l == 1:
m = 100
elif l == 2:
m = 10
else:
m = 1
latitude[3] = m * int(milliseconds)
t = tok.get_string()
elif t.isdigit():
latitude[2] = int(t)
t = tok.get_string()
if t == 'S':
latitude[4] = -1
elif t != 'N':
raise dns.exception.SyntaxError('bad latitude hemisphere value')
longitude[0] = tok.get_int()
t = tok.get_string()
if t.isdigit():
longitude[1] = int(t)
t = tok.get_string()
if '.' in t:
(seconds, milliseconds) = t.split('.')
if not seconds.isdigit():
raise dns.exception.SyntaxError(
'bad longitude seconds value')
longitude[2] = int(seconds)
if longitude[2] >= 60:
raise dns.exception.SyntaxError('longitude seconds >= 60')
l = len(milliseconds)
if l == 0 or l > 3 or not milliseconds.isdigit():
raise dns.exception.SyntaxError(
'bad longitude milliseconds value')
if l == 1:
m = 100
elif l == 2:
m = 10
else:
m = 1
longitude[3] = m * int(milliseconds)
t = tok.get_string()
elif t.isdigit():
longitude[2] = int(t)
t = tok.get_string()
if t == 'W':
longitude[4] = -1
elif t != 'E':
raise dns.exception.SyntaxError('bad longitude hemisphere value')
t = tok.get_string()
if t[-1] == 'm':
t = t[0: -1]
altitude = float(t) * 100.0 # m -> cm
token = tok.get().unescape()
if not token.is_eol_or_eof():
value = token.value
if value[-1] == 'm':
value = value[0: -1]
size = float(value) * 100.0 # m -> cm
token = tok.get().unescape()
if not token.is_eol_or_eof():
value = token.value
if value[-1] == 'm':
value = value[0: -1]
hprec = float(value) * 100.0 # m -> cm
token = tok.get().unescape()
if not token.is_eol_or_eof():
value = token.value
if value[-1] == 'm':
value = value[0: -1]
vprec = float(value) * 100.0 # m -> cm
tok.get_eol()
return cls(rdclass, rdtype, latitude, longitude, altitude,
size, hprec, vprec)
def to_wire(self, file, compress=None, origin=None):
milliseconds = (self.latitude[0] * 3600000 +
self.latitude[1] * 60000 +
self.latitude[2] * 1000 +
self.latitude[3]) * self.latitude[4]
latitude = long(0x80000000) + milliseconds
milliseconds = (self.longitude[0] * 3600000 +
self.longitude[1] * 60000 +
self.longitude[2] * 1000 +
self.longitude[3]) * self.longitude[4]
longitude = long(0x80000000) + milliseconds
altitude = long(self.altitude) + long(10000000)
size = _encode_size(self.size, "size")
hprec = _encode_size(self.horizontal_precision, "horizontal precision")
vprec = _encode_size(self.vertical_precision, "vertical precision")
wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude,
longitude, altitude)
file.write(wire)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(version, size, hprec, vprec, latitude, longitude, altitude) = \
struct.unpack("!BBBBIII", wire[current: current + rdlen])
if latitude > long(0x80000000):
latitude = float(latitude - long(0x80000000)) / 3600000
else:
latitude = -1 * float(long(0x80000000) - latitude) / 3600000
if latitude < -90.0 or latitude > 90.0:
raise dns.exception.FormError("bad latitude")
if longitude > long(0x80000000):
longitude = float(longitude - long(0x80000000)) / 3600000
else:
longitude = -1 * float(long(0x80000000) - longitude) / 3600000
if longitude < -180.0 or longitude > 180.0:
raise dns.exception.FormError("bad longitude")
altitude = float(altitude) - 10000000.0
size = _decode_size(size, "size")
hprec = _decode_size(hprec, "horizontal precision")
vprec = _decode_size(vprec, "vertical precision")
return cls(rdclass, rdtype, latitude, longitude, altitude,
size, hprec, vprec)
def _get_float_latitude(self):
return _tuple_to_float(self.latitude)
def _set_float_latitude(self, value):
self.latitude = _float_to_tuple(value)
float_latitude = property(_get_float_latitude, _set_float_latitude,
doc="latitude as a floating point value")
def _get_float_longitude(self):
return _tuple_to_float(self.longitude)
def _set_float_longitude(self, value):
self.longitude = _float_to_tuple(value)
float_longitude = property(_get_float_longitude, _set_float_longitude,
doc="longitude as a floating point value")

View file

@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.mxbase
class MX(dns.rdtypes.mxbase.MXBase):
"""MX record"""

View file

@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.nsbase
class NS(dns.rdtypes.nsbase.NSBase):
"""NS record"""

View file

@ -0,0 +1,128 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.rdatatype
import dns.name
from dns._compat import xrange
class NSEC(dns.rdata.Rdata):
"""NSEC record
@ivar next: the next name
@type next: dns.name.Name object
@ivar windows: the windowed bitmap list
@type windows: list of (window number, string) tuples"""
__slots__ = ['next', 'windows']
def __init__(self, rdclass, rdtype, next, windows):
super(NSEC, self).__init__(rdclass, rdtype)
self.next = next
self.windows = windows
def to_text(self, origin=None, relativize=True, **kw):
next = self.next.choose_relativity(origin, relativize)
text = ''
for (window, bitmap) in self.windows:
bits = []
for i in xrange(0, len(bitmap)):
byte = bitmap[i]
for j in xrange(0, 8):
if byte & (0x80 >> j):
bits.append(dns.rdatatype.to_text(window * 256 +
i * 8 + j))
text += (' ' + ' '.join(bits))
return '{}{}'.format(next, text)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
next = tok.get_name()
next = next.choose_relativity(origin, relativize)
rdtypes = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
nrdtype = dns.rdatatype.from_text(token.value)
if nrdtype == 0:
raise dns.exception.SyntaxError("NSEC with bit 0")
if nrdtype > 65535:
raise dns.exception.SyntaxError("NSEC with bit > 65535")
rdtypes.append(nrdtype)
rdtypes.sort()
window = 0
octets = 0
prior_rdtype = 0
bitmap = bytearray(b'\0' * 32)
windows = []
for nrdtype in rdtypes:
if nrdtype == prior_rdtype:
continue
prior_rdtype = nrdtype
new_window = nrdtype // 256
if new_window != window:
windows.append((window, bitmap[0:octets]))
bitmap = bytearray(b'\0' * 32)
window = new_window
offset = nrdtype % 256
byte = offset // 8
bit = offset % 8
octets = byte + 1
bitmap[byte] = bitmap[byte] | (0x80 >> bit)
windows.append((window, bitmap[0:octets]))
return cls(rdclass, rdtype, next, windows)
def to_wire(self, file, compress=None, origin=None):
self.next.to_wire(file, None, origin)
for (window, bitmap) in self.windows:
file.write(struct.pack('!BB', window, len(bitmap)))
file.write(bitmap)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(next, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
windows = []
while rdlen > 0:
if rdlen < 3:
raise dns.exception.FormError("NSEC too short")
window = wire[current]
octets = wire[current + 1]
if octets == 0 or octets > 32:
raise dns.exception.FormError("bad NSEC octets")
current += 2
rdlen -= 2
if rdlen < octets:
raise dns.exception.FormError("bad NSEC bitmap length")
bitmap = bytearray(wire[current: current + octets].unwrap())
current += octets
rdlen -= octets
windows.append((window, bitmap))
if origin is not None:
next = next.relativize(origin)
return cls(rdclass, rdtype, next, windows)
def choose_relativity(self, origin=None, relativize=True):
self.next = self.next.choose_relativity(origin, relativize)
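
A sketch of the windowed type bitmap built above (illustrative, assuming the vendored dnspython is importable as `dns`):
# Types A, MX, RRSIG and NSEC all fall into window 0, so a single short
# bitmap of six octets (enough to cover bit 47) is emitted.
import dns.rdata
import dns.rdataclass
import dns.rdatatype
nsec = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NSEC,
                           'host.example. A MX RRSIG NSEC')
for window, bitmap in nsec.windows:
    print(window, len(bitmap))  # 0 6
print(nsec.to_text())           # 'host.example. A MX RRSIG NSEC'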

View file

@ -0,0 +1,196 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import base64
import binascii
import string
import struct
import dns.exception
import dns.rdata
import dns.rdatatype
from dns._compat import xrange, text_type, PY3
# pylint: disable=deprecated-string-function
if PY3:
b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV',
b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567')
b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
b'0123456789ABCDEFGHIJKLMNOPQRSTUV')
else:
b32_hex_to_normal = string.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUV',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567')
b32_normal_to_hex = string.maketrans('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
'0123456789ABCDEFGHIJKLMNOPQRSTUV')
# pylint: enable=deprecated-string-function
# hash algorithm constants
SHA1 = 1
# flag constants
OPTOUT = 1
class NSEC3(dns.rdata.Rdata):
"""NSEC3 record
@ivar algorithm: the hash algorithm number
@type algorithm: int
@ivar flags: the flags
@type flags: int
@ivar iterations: the number of iterations
@type iterations: int
@ivar salt: the salt
@type salt: string
@ivar next: the next name hash
@type next: string
@ivar windows: the windowed bitmap list
@type windows: list of (window number, string) tuples"""
__slots__ = ['algorithm', 'flags', 'iterations', 'salt', 'next', 'windows']
def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt,
next, windows):
super(NSEC3, self).__init__(rdclass, rdtype)
self.algorithm = algorithm
self.flags = flags
self.iterations = iterations
if isinstance(salt, text_type):
self.salt = salt.encode()
else:
self.salt = salt
self.next = next
self.windows = windows
def to_text(self, origin=None, relativize=True, **kw):
next = base64.b32encode(self.next).translate(
b32_normal_to_hex).lower().decode()
if self.salt == b'':
salt = '-'
else:
salt = binascii.hexlify(self.salt).decode()
text = u''
for (window, bitmap) in self.windows:
bits = []
for i in xrange(0, len(bitmap)):
byte = bitmap[i]
for j in xrange(0, 8):
if byte & (0x80 >> j):
bits.append(dns.rdatatype.to_text(window * 256 +
i * 8 + j))
text += (u' ' + u' '.join(bits))
return u'%u %u %u %s %s%s' % (self.algorithm, self.flags,
self.iterations, salt, next, text)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
algorithm = tok.get_uint8()
flags = tok.get_uint8()
iterations = tok.get_uint16()
salt = tok.get_string()
if salt == u'-':
salt = b''
else:
salt = binascii.unhexlify(salt.encode('ascii'))
next = tok.get_string().encode(
'ascii').upper().translate(b32_hex_to_normal)
next = base64.b32decode(next)
rdtypes = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
nrdtype = dns.rdatatype.from_text(token.value)
if nrdtype == 0:
raise dns.exception.SyntaxError("NSEC3 with bit 0")
if nrdtype > 65535:
raise dns.exception.SyntaxError("NSEC3 with bit > 65535")
rdtypes.append(nrdtype)
rdtypes.sort()
window = 0
octets = 0
prior_rdtype = 0
bitmap = bytearray(b'\0' * 32)
windows = []
for nrdtype in rdtypes:
if nrdtype == prior_rdtype:
continue
prior_rdtype = nrdtype
new_window = nrdtype // 256
if new_window != window:
if octets != 0:
windows.append((window, bitmap[0:octets]))
bitmap = bytearray(b'\0' * 32)
window = new_window
offset = nrdtype % 256
byte = offset // 8
bit = offset % 8
octets = byte + 1
bitmap[byte] = bitmap[byte] | (0x80 >> bit)
if octets != 0:
windows.append((window, bitmap[0:octets]))
return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next,
windows)
def to_wire(self, file, compress=None, origin=None):
l = len(self.salt)
file.write(struct.pack("!BBHB", self.algorithm, self.flags,
self.iterations, l))
file.write(self.salt)
l = len(self.next)
file.write(struct.pack("!B", l))
file.write(self.next)
for (window, bitmap) in self.windows:
file.write(struct.pack("!BB", window, len(bitmap)))
file.write(bitmap)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(algorithm, flags, iterations, slen) = \
struct.unpack('!BBHB', wire[current: current + 5])
current += 5
rdlen -= 5
salt = wire[current: current + slen].unwrap()
current += slen
rdlen -= slen
nlen = wire[current]
current += 1
rdlen -= 1
next = wire[current: current + nlen].unwrap()
current += nlen
rdlen -= nlen
windows = []
while rdlen > 0:
if rdlen < 3:
raise dns.exception.FormError("NSEC3 too short")
window = wire[current]
octets = wire[current + 1]
if octets == 0 or octets > 32:
raise dns.exception.FormError("bad NSEC3 octets")
current += 2
rdlen -= 2
if rdlen < octets:
raise dns.exception.FormError("bad NSEC3 bitmap length")
bitmap = bytearray(wire[current: current + octets].unwrap())
current += octets
rdlen -= octets
windows.append((window, bitmap))
return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next,
windows)
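
A brief sketch of the NSEC3 text format handled above (illustrative; the salt and the 32-character base32hex hash are sample values, not derived here):
# Presentation order: algorithm flags iterations salt next-hashed-owner [types]
import dns.rdata
import dns.rdataclass
import dns.rdatatype
nsec3 = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NSEC3,
    '1 1 12 aabbccdd 2t7b4g4vsa5smi47k61mv5bv1a22bojr MX RRSIG')
print(nsec3.iterations)  # 12
print(len(nsec3.next))   # 20 (raw digest decoded from base32hex)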

View file

@ -0,0 +1,90 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import binascii
import dns.exception
import dns.rdata
from dns._compat import text_type
class NSEC3PARAM(dns.rdata.Rdata):
"""NSEC3PARAM record
@ivar algorithm: the hash algorithm number
@type algorithm: int
@ivar flags: the flags
@type flags: int
@ivar iterations: the number of iterations
@type iterations: int
@ivar salt: the salt
@type salt: string"""
__slots__ = ['algorithm', 'flags', 'iterations', 'salt']
def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt):
super(NSEC3PARAM, self).__init__(rdclass, rdtype)
self.algorithm = algorithm
self.flags = flags
self.iterations = iterations
if isinstance(salt, text_type):
self.salt = salt.encode()
else:
self.salt = salt
def to_text(self, origin=None, relativize=True, **kw):
if self.salt == b'':
salt = '-'
else:
salt = binascii.hexlify(self.salt).decode()
return '%u %u %u %s' % (self.algorithm, self.flags, self.iterations,
salt)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
algorithm = tok.get_uint8()
flags = tok.get_uint8()
iterations = tok.get_uint16()
salt = tok.get_string()
if salt == '-':
salt = ''
else:
salt = binascii.unhexlify(salt.encode())
tok.get_eol()
return cls(rdclass, rdtype, algorithm, flags, iterations, salt)
def to_wire(self, file, compress=None, origin=None):
l = len(self.salt)
file.write(struct.pack("!BBHB", self.algorithm, self.flags,
self.iterations, l))
file.write(self.salt)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(algorithm, flags, iterations, slen) = \
struct.unpack('!BBHB',
wire[current: current + 5])
current += 5
rdlen -= 5
salt = wire[current: current + slen].unwrap()
current += slen
rdlen -= slen
if rdlen != 0:
raise dns.exception.FormError
return cls(rdclass, rdtype, algorithm, flags, iterations, salt)

View file

@ -0,0 +1,60 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2016 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import base64
import dns.exception
import dns.rdata
import dns.tokenizer
class OPENPGPKEY(dns.rdata.Rdata):
"""OPENPGPKEY record
@ivar key: the key
@type key: bytes
@see: RFC 7929
"""
def __init__(self, rdclass, rdtype, key):
super(OPENPGPKEY, self).__init__(rdclass, rdtype)
self.key = key
def to_text(self, origin=None, relativize=True, **kw):
return dns.rdata._base64ify(self.key)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
chunks = []
while 1:
t = tok.get().unescape()
if t.is_eol_or_eof():
break
if not t.is_identifier():
raise dns.exception.SyntaxError
chunks.append(t.value.encode())
b64 = b''.join(chunks)
key = base64.b64decode(b64)
return cls(rdclass, rdtype, key)
def to_wire(self, file, compress=None, origin=None):
file.write(self.key)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
key = wire[current: current + rdlen].unwrap()
return cls(rdclass, rdtype, key)
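
A minimal sketch (illustrative; the base64 payload is a placeholder, not a real OpenPGP key):
import dns.rdata
import dns.rdataclass
import dns.rdatatype
opk = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.OPENPGPKEY,
                          'YSBzYW1wbGUga2V5')
print(opk.key)  # b'a sample key'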

View file

@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.nsbase
class PTR(dns.rdtypes.nsbase.NSBase):
"""PTR record"""

View file

@ -0,0 +1,82 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.rdata
import dns.name
class RP(dns.rdata.Rdata):
"""RP record
@ivar mbox: The responsible person's mailbox
@type mbox: dns.name.Name object
@ivar txt: The owner name of a node with TXT records, or the root name
if no TXT records are associated with this RP.
@type txt: dns.name.Name object
@see: RFC 1183"""
__slots__ = ['mbox', 'txt']
def __init__(self, rdclass, rdtype, mbox, txt):
super(RP, self).__init__(rdclass, rdtype)
self.mbox = mbox
self.txt = txt
def to_text(self, origin=None, relativize=True, **kw):
mbox = self.mbox.choose_relativity(origin, relativize)
txt = self.txt.choose_relativity(origin, relativize)
return "{} {}".format(str(mbox), str(txt))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
mbox = tok.get_name()
txt = tok.get_name()
mbox = mbox.choose_relativity(origin, relativize)
txt = txt.choose_relativity(origin, relativize)
tok.get_eol()
return cls(rdclass, rdtype, mbox, txt)
def to_wire(self, file, compress=None, origin=None):
self.mbox.to_wire(file, None, origin)
self.txt.to_wire(file, None, origin)
def to_digestable(self, origin=None):
return self.mbox.to_digestable(origin) + \
self.txt.to_digestable(origin)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(mbox, cused) = dns.name.from_wire(wire[: current + rdlen],
current)
current += cused
rdlen -= cused
if rdlen <= 0:
raise dns.exception.FormError
(txt, cused) = dns.name.from_wire(wire[: current + rdlen],
current)
if cused != rdlen:
raise dns.exception.FormError
if origin is not None:
mbox = mbox.relativize(origin)
txt = txt.relativize(origin)
return cls(rdclass, rdtype, mbox, txt)
def choose_relativity(self, origin=None, relativize=True):
self.mbox = self.mbox.choose_relativity(origin, relativize)
self.txt = self.txt.choose_relativity(origin, relativize)
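
A short sketch of RP parsing (illustrative, assuming the vendored dnspython is importable; both names are placeholders):
import dns.rdata
import dns.rdataclass
import dns.rdatatype
rp = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.RP,
                         'hostmaster.example. ops.example.')
print(rp.mbox, rp.txt)  # hostmaster.example. ops.example.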

View file

@ -0,0 +1,158 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import base64
import calendar
import struct
import time
import dns.dnssec
import dns.exception
import dns.rdata
import dns.rdatatype
class BadSigTime(dns.exception.DNSException):
"""Time in DNS SIG or RRSIG resource record cannot be parsed."""
def sigtime_to_posixtime(what):
if len(what) != 14:
raise BadSigTime
year = int(what[0:4])
month = int(what[4:6])
day = int(what[6:8])
hour = int(what[8:10])
minute = int(what[10:12])
second = int(what[12:14])
return calendar.timegm((year, month, day, hour, minute, second,
0, 0, 0))
def posixtime_to_sigtime(what):
return time.strftime('%Y%m%d%H%M%S', time.gmtime(what))
class RRSIG(dns.rdata.Rdata):
"""RRSIG record
@ivar type_covered: the rdata type this signature covers
@type type_covered: int
@ivar algorithm: the algorithm used for the sig
@type algorithm: int
@ivar labels: number of labels
@type labels: int
@ivar original_ttl: the original TTL
@type original_ttl: long
@ivar expiration: signature expiration time
@type expiration: long
@ivar inception: signature inception time
@type inception: long
@ivar key_tag: the key tag
@type key_tag: int
@ivar signer: the signer
@type signer: dns.name.Name object
@ivar signature: the signature
@type signature: string"""
__slots__ = ['type_covered', 'algorithm', 'labels', 'original_ttl',
'expiration', 'inception', 'key_tag', 'signer',
'signature']
def __init__(self, rdclass, rdtype, type_covered, algorithm, labels,
original_ttl, expiration, inception, key_tag, signer,
signature):
super(RRSIG, self).__init__(rdclass, rdtype)
self.type_covered = type_covered
self.algorithm = algorithm
self.labels = labels
self.original_ttl = original_ttl
self.expiration = expiration
self.inception = inception
self.key_tag = key_tag
self.signer = signer
self.signature = signature
def covers(self):
return self.type_covered
def to_text(self, origin=None, relativize=True, **kw):
return '%s %d %d %d %s %s %d %s %s' % (
dns.rdatatype.to_text(self.type_covered),
self.algorithm,
self.labels,
self.original_ttl,
posixtime_to_sigtime(self.expiration),
posixtime_to_sigtime(self.inception),
self.key_tag,
self.signer.choose_relativity(origin, relativize),
dns.rdata._base64ify(self.signature)
)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
type_covered = dns.rdatatype.from_text(tok.get_string())
algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
labels = tok.get_int()
original_ttl = tok.get_ttl()
expiration = sigtime_to_posixtime(tok.get_string())
inception = sigtime_to_posixtime(tok.get_string())
key_tag = tok.get_int()
signer = tok.get_name()
signer = signer.choose_relativity(origin, relativize)
chunks = []
while 1:
t = tok.get().unescape()
if t.is_eol_or_eof():
break
if not t.is_identifier():
raise dns.exception.SyntaxError
chunks.append(t.value.encode())
b64 = b''.join(chunks)
signature = base64.b64decode(b64)
return cls(rdclass, rdtype, type_covered, algorithm, labels,
original_ttl, expiration, inception, key_tag, signer,
signature)
def to_wire(self, file, compress=None, origin=None):
header = struct.pack('!HBBIIIH', self.type_covered,
self.algorithm, self.labels,
self.original_ttl, self.expiration,
self.inception, self.key_tag)
file.write(header)
self.signer.to_wire(file, None, origin)
file.write(self.signature)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
header = struct.unpack('!HBBIIIH', wire[current: current + 18])
current += 18
rdlen -= 18
(signer, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
if origin is not None:
signer = signer.relativize(origin)
signature = wire[current: current + rdlen].unwrap()
return cls(rdclass, rdtype, header[0], header[1], header[2],
header[3], header[4], header[5], header[6], signer,
signature)
def choose_relativity(self, origin=None, relativize=True):
self.signer = self.signer.choose_relativity(origin, relativize)
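
A sketch of the timestamp helpers defined near the top of this file (assuming the standard dnspython layout, where this module would be importable as dns.rdtypes.ANY.RRSIG):
from dns.rdtypes.ANY.RRSIG import posixtime_to_sigtime, sigtime_to_posixtime
print(sigtime_to_posixtime('20040509183619'))  # 1084127779
print(posixtime_to_sigtime(1084127779))        # '20040509183619'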

View file

@ -0,0 +1,23 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.mxbase
class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX):
"""RT record"""

View file

@ -0,0 +1,116 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SOA(dns.rdata.Rdata):
"""SOA record
@ivar mname: the SOA MNAME (master name) field
@type mname: dns.name.Name object
@ivar rname: the SOA RNAME (responsible name) field
@type rname: dns.name.Name object
@ivar serial: The zone's serial number
@type serial: int
@ivar refresh: The zone's refresh value (in seconds)
@type refresh: int
@ivar retry: The zone's retry value (in seconds)
@type retry: int
@ivar expire: The zone's expiration value (in seconds)
@type expire: int
@ivar minimum: The zone's negative caching time (in seconds, called
"minimum" for historical reasons)
@type minimum: int
@see: RFC 1035"""
__slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire',
'minimum']
def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry,
expire, minimum):
super(SOA, self).__init__(rdclass, rdtype)
self.mname = mname
self.rname = rname
self.serial = serial
self.refresh = refresh
self.retry = retry
self.expire = expire
self.minimum = minimum
def to_text(self, origin=None, relativize=True, **kw):
mname = self.mname.choose_relativity(origin, relativize)
rname = self.rname.choose_relativity(origin, relativize)
return '%s %s %d %d %d %d %d' % (
mname, rname, self.serial, self.refresh, self.retry,
self.expire, self.minimum)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
mname = tok.get_name()
rname = tok.get_name()
mname = mname.choose_relativity(origin, relativize)
rname = rname.choose_relativity(origin, relativize)
serial = tok.get_uint32()
refresh = tok.get_ttl()
retry = tok.get_ttl()
expire = tok.get_ttl()
minimum = tok.get_ttl()
tok.get_eol()
return cls(rdclass, rdtype, mname, rname, serial, refresh, retry,
expire, minimum)
def to_wire(self, file, compress=None, origin=None):
self.mname.to_wire(file, compress, origin)
self.rname.to_wire(file, compress, origin)
five_ints = struct.pack('!IIIII', self.serial, self.refresh,
self.retry, self.expire, self.minimum)
file.write(five_ints)
def to_digestable(self, origin=None):
return self.mname.to_digestable(origin) + \
self.rname.to_digestable(origin) + \
struct.pack('!IIIII', self.serial, self.refresh,
self.retry, self.expire, self.minimum)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
(mname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
(rname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
if rdlen != 20:
raise dns.exception.FormError
five_ints = struct.unpack('!IIIII',
wire[current: current + rdlen])
if origin is not None:
mname = mname.relativize(origin)
rname = rname.relativize(origin)
return cls(rdclass, rdtype, mname, rname,
five_ints[0], five_ints[1], five_ints[2], five_ints[3],
five_ints[4])
def choose_relativity(self, origin=None, relativize=True):
self.mname = self.mname.choose_relativity(origin, relativize)
self.rname = self.rname.choose_relativity(origin, relativize)
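
An illustrative SOA parsing sketch (all field values are placeholders):
import dns.rdata
import dns.rdataclass
import dns.rdatatype
soa = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.SOA,
    'ns1.example. hostmaster.example. 2021042101 7200 900 1209600 86400')
print(soa.serial, soa.refresh, soa.minimum)  # 2021042101 7200 86400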

View file

@ -0,0 +1,25 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.txtbase
class SPF(dns.rdtypes.txtbase.TXTBase):
"""SPF record
@see: RFC 4408"""

View file

@ -0,0 +1,79 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import binascii
import dns.rdata
import dns.rdatatype
class SSHFP(dns.rdata.Rdata):
"""SSHFP record
@ivar algorithm: the algorithm
@type algorithm: int
@ivar fp_type: the digest type
@type fp_type: int
@ivar fingerprint: the fingerprint
@type fingerprint: string
@see: draft-ietf-secsh-dns-05.txt"""
__slots__ = ['algorithm', 'fp_type', 'fingerprint']
def __init__(self, rdclass, rdtype, algorithm, fp_type,
fingerprint):
super(SSHFP, self).__init__(rdclass, rdtype)
self.algorithm = algorithm
self.fp_type = fp_type
self.fingerprint = fingerprint
def to_text(self, origin=None, relativize=True, **kw):
return '%d %d %s' % (self.algorithm,
self.fp_type,
dns.rdata._hexify(self.fingerprint,
chunksize=128))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
algorithm = tok.get_uint8()
fp_type = tok.get_uint8()
chunks = []
while 1:
t = tok.get().unescape()
if t.is_eol_or_eof():
break
if not t.is_identifier():
raise dns.exception.SyntaxError
chunks.append(t.value.encode())
fingerprint = b''.join(chunks)
fingerprint = binascii.unhexlify(fingerprint)
return cls(rdclass, rdtype, algorithm, fp_type, fingerprint)
def to_wire(self, file, compress=None, origin=None):
header = struct.pack("!BB", self.algorithm, self.fp_type)
file.write(header)
file.write(self.fingerprint)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
header = struct.unpack("!BB", wire[current: current + 2])
current += 2
rdlen -= 2
fingerprint = wire[current: current + rdlen].unwrap()
return cls(rdclass, rdtype, header[0], header[1], fingerprint)
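
A closing sketch for SSHFP (illustrative; the fingerprint is a placeholder hex string, not a real host key digest):
import dns.rdata
import dns.rdataclass
import dns.rdatatype
sshfp = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.SSHFP,
    '2 1 123456789abcdef67890123456789abcdef67890')
print(sshfp.algorithm, sshfp.fp_type)  # 2 1
print(len(sshfp.fingerprint))          # 20 bytes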

Some files were not shown because too many files have changed in this diff.